azkaban-uncached

Changes

src/java/azkaban/jmx/JmxScheduler.java 36 (+0 -36)
src/java/azkaban/jmx/JmxSchedulerMBean.java 18 (+0 -18)
src/java/azkaban/jmx/JmxSLAManager.java 45 (+0 -45)
src/java/azkaban/jmx/JmxSLAManagerMBean.java 20 (+0 -20)
src/java/azkaban/scheduler/JdbcScheduleLoader.java 348 (+0 -348)
src/java/azkaban/sla/JdbcSLALoader.java 299 (+0 -299)
src/java/azkaban/sla/SLA.java 267 (+0 -267)
src/java/azkaban/sla/SLALoader.java 30 (+0 -30)
src/java/azkaban/sla/SlaMailer.java 79 (+0 -79)
src/java/azkaban/sla/SLAManager.java 484 (+0 -484)
src/java/azkaban/sla/SlaOptions.java 67 (+0 -67)
unit/java/azkaban/scheduler/JdbcScheduleLoaderTest.java 325 (+0 -325)

Details

diff --git a/lib/commons-jexl-2.1.1.jar b/lib/commons-jexl-2.1.1.jar
new file mode 100644
index 0000000..ab288a8
Binary files /dev/null and b/lib/commons-jexl-2.1.1.jar differ
diff --git a/src/java/azkaban/alert/Alerter.java b/src/java/azkaban/alert/Alerter.java
new file mode 100644
index 0000000..1ba02a8
--- /dev/null
+++ b/src/java/azkaban/alert/Alerter.java
@@ -0,0 +1,11 @@
+package azkaban.alert;
+
+import azkaban.executor.ExecutableFlow;
+import azkaban.sla.SlaOption;
+
+public interface Alerter {
+	void alertOnSuccess(ExecutableFlow exflow) throws Exception;
+	void alertOnError(ExecutableFlow exflow, String ... extraReasons) throws Exception;
+	void alertOnFirstError(ExecutableFlow exflow) throws Exception;
+	void alertOnSla(SlaOption slaOption, String slaMessage) throws Exception;
+}
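
Note (editorial, not part of this change): the sketch below shows a minimal, hypothetical implementation of the Alerter interface added above. ExecutorManager, further down in this diff, looks implementations up by name, e.g. "email" or the value of the alert.type flow parameter; the class and behavior here are made up for illustration.

package azkaban.alert;

import java.util.Arrays;

import org.apache.log4j.Logger;

import azkaban.executor.ExecutableFlow;
import azkaban.sla.SlaOption;

// Hypothetical example implementation: it only logs, whereas a real alerter
// (such as the one registered under "email") would send notifications.
public class LoggingAlerter implements Alerter {
	private static final Logger logger = Logger.getLogger(LoggingAlerter.class);

	@Override
	public void alertOnSuccess(ExecutableFlow exflow) throws Exception {
		logger.info("Flow " + exflow.getFlowId() + " succeeded.");
	}

	@Override
	public void alertOnError(ExecutableFlow exflow, String... extraReasons) throws Exception {
		logger.error("Flow " + exflow.getFlowId() + " failed: " + Arrays.toString(extraReasons));
	}

	@Override
	public void alertOnFirstError(ExecutableFlow exflow) throws Exception {
		logger.error("First failure in flow " + exflow.getFlowId());
	}

	@Override
	public void alertOnSla(SlaOption slaOption, String slaMessage) throws Exception {
		logger.warn("SLA alert: " + slaMessage);
	}
}
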
diff --git a/src/java/azkaban/execapp/JobRunner.java b/src/java/azkaban/execapp/JobRunner.java
index 4b187e6..40163de 100644
--- a/src/java/azkaban/execapp/JobRunner.java
+++ b/src/java/azkaban/execapp/JobRunner.java
@@ -158,6 +158,7 @@ public class JobRunner extends EventHandler implements Runnable {
 				fileAppender.setMaxFileSize(jobLogChunkSize);
 				jobAppender = fileAppender;
 				logger.addAppender(jobAppender);
+				logger.setAdditivity(false);
 			} catch (IOException e) {
 				flowLogger.error("Could not open log file in " + workingDir + " for job " + node.getJobId(), e);
 			}
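
Note (editorial, not part of this change): setAdditivity(false) stops the job logger's events from also propagating to ancestor appenders such as the flow log or root logger, so each job's output lands only in its own file appender. A minimal standalone sketch of the same log4j 1.x pattern, with made-up logger and file names:

import java.io.IOException;

import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;

public class JobLoggerExample {
	public static void main(String[] args) throws IOException {
		// Hypothetical job logger with its own rolling file appender.
		Logger jobLogger = Logger.getLogger("job.someJobId");
		RollingFileAppender appender = new RollingFileAppender(
				new PatternLayout("%d %p - %m%n"), "job.log", true);
		jobLogger.addAppender(appender);
		// Without this, events also propagate to ancestor loggers' appenders
		// (e.g. the flow or root logger), duplicating job output there.
		jobLogger.setAdditivity(false);
		jobLogger.info("logged only to job.log");
	}
}
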
diff --git a/src/java/azkaban/executor/ExecutableFlow.java b/src/java/azkaban/executor/ExecutableFlow.java
index 5bae020..eaa27a6 100644
--- a/src/java/azkaban/executor/ExecutableFlow.java
+++ b/src/java/azkaban/executor/ExecutableFlow.java
@@ -479,4 +479,15 @@ public class ExecutableFlow {
 	public void setVersion(int version) {
 		this.version = version;
 	}
+	
+	public static boolean isFinished(ExecutableFlow flow) {
+		switch(flow.getStatus()) {
+		case SUCCEEDED:
+		case FAILED:
+		case KILLED:
+			return true;
+		default:
+			return false;
+		}
+	}
 }
diff --git a/src/java/azkaban/executor/ExecutorManager.java b/src/java/azkaban/executor/ExecutorManager.java
index f512b4d..fd8a16c 100644
--- a/src/java/azkaban/executor/ExecutorManager.java
+++ b/src/java/azkaban/executor/ExecutorManager.java
@@ -40,6 +40,7 @@ import org.apache.http.impl.client.DefaultHttpClient;
 import org.apache.log4j.Logger;
 import org.joda.time.DateTime;
 
+import azkaban.alert.Alerter;
 import azkaban.project.Project;
 import azkaban.scheduler.ScheduleStatisticManager;
 import azkaban.utils.FileIOUtils.JobMetaData;
@@ -52,7 +53,7 @@ import azkaban.utils.Props;
  * Executor manager used to manage the client side job.
  *
  */
-public class ExecutorManager {
+public class ExecutorManager implements ExecutorManagerAdapter {
 	private static Logger logger = Logger.getLogger(ExecutorManager.class);
 	private ExecutorLoader executorLoader;
 	private String executorHost;
@@ -63,7 +64,6 @@ public class ExecutorManager {
 	private ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>> runningFlows = new ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
 	private ConcurrentHashMap<Integer, ExecutableFlow> recentlyFinished = new ConcurrentHashMap<Integer, ExecutableFlow>();
 
-	private ExecutorMailer mailer;
 	private ExecutingManagerUpdaterThread executingManager;
 	
 	private static final long DEFAULT_EXECUTION_LOGS_RETENTION_MS = 3*4*7*24*60*60*1000l;
@@ -71,34 +71,33 @@ public class ExecutorManager {
 	
 	private long lastThreadCheckTime = -1;
 	private String updaterStage = "not started";
+
+	private Map<String, Alerter> alerters;
 	
 	File cacheDir;
 	
-	public ExecutorManager(Props props, ExecutorLoader loader) throws ExecutorManagerException {
+	public ExecutorManager(Props props, ExecutorLoader loader, Map<String, Alerter> alters) throws ExecutorManagerException {
+		
 		this.executorLoader = loader;
 		this.loadRunningFlows();
-		
 		executorHost = props.getString("executor.host", "localhost");
 		executorPort = props.getInt("executor.port");
+		
+		alerters = alters;
+		
 		cacheDir = new File(props.getString("cache.directory", "cache"));
-		mailer = new ExecutorMailer(props);
+
 		executingManager = new ExecutingManagerUpdaterThread();
 		executingManager.start();
-
+		
 		long executionLogsRetentionMs = props.getLong("execution.logs.retention.ms", DEFAULT_EXECUTION_LOGS_RETENTION_MS);
 		cleanerThread = new CleanerThread(executionLogsRetentionMs);
 		cleanerThread.start();
+		
 	}
 	
-	public String getExecutorHost() {
-		return executorHost;
-	}
-	
-	public int getExecutorPort() {
-		return executorPort;
-	}
-	
-	public State getExecutorThreadState() {
+	@Override
+	public State getExecutorManagerThreadState() {
 		return executingManager.getState();
 	}
 	
@@ -106,11 +105,13 @@ public class ExecutorManager {
 		return updaterStage;
 	}
 	
-	public boolean isThreadActive() {
+	@Override
+	public boolean isExecutorManagerThreadActive() {
 		return executingManager.isAlive();
 	}
 	
-	public long getLastThreadCheckTime() {
+	@Override
+	public long getLastExecutorManagerThreadCheckTime() {
 		return lastThreadCheckTime;
 	}
 	
@@ -118,6 +119,7 @@ public class ExecutorManager {
 		return this.lastCleanerThreadCheckTime;
 	}
 	
+	@Override
 	public Set<String> getPrimaryServerHosts() {
 		// Only one for now. More probably later.
 		HashSet<String> ports = new HashSet<String>();
@@ -125,6 +127,7 @@ public class ExecutorManager {
 		return ports;
 	}
 	
+	@Override
 	public Set<String> getAllActiveExecutorServerHosts() {
 		// Includes non primary server/hosts
 		HashSet<String> ports = new HashSet<String>();
@@ -141,38 +144,37 @@ public class ExecutorManager {
 		runningFlows.putAll(executorLoader.fetchActiveFlows());
 	}
 	
+	@Override
 	public List<Integer> getRunningFlows(int projectId, String flowId) {
 		ArrayList<Integer> executionIds = new ArrayList<Integer>();
 		for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
-			if (ref.getSecond().getFlowId().equals(flowId)) {
+			if (ref.getSecond().getFlowId().equals(flowId) && ref.getSecond().getProjectId() == projectId) {
 				executionIds.add(ref.getFirst().getExecId());
 			}
 		}
-		
 		return executionIds;
 	}
 	
+	@Override
 	public boolean isFlowRunning(int projectId, String flowId) {
 		for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
-
 			if (ref.getSecond().getProjectId() == projectId && ref.getSecond().getFlowId().equals(flowId)) {
 				return true;
 			}
 		}
-		
 		return false;
 	}
 	
+	@Override
 	public ExecutableFlow getExecutableFlow(int execId) throws ExecutorManagerException {
 		Pair<ExecutionReference, ExecutableFlow> active = runningFlows.get(execId);
-		
 		if (active == null) {
 			return executorLoader.fetchExecutableFlow(execId);
 		}
-
 		return active.getSecond();
 	}
 	
+	@Override
 	public List<ExecutableFlow> getRunningFlows() {
 		ArrayList<ExecutableFlow> flows = new ArrayList<ExecutableFlow>();
 		for (Pair<ExecutionReference, ExecutableFlow> ref : runningFlows.values()) {
@@ -194,39 +196,47 @@ public class ExecutorManager {
 		return new ArrayList<ExecutableFlow>(recentlyFinished.values());
 	}
 	
+	@Override
 	public List<ExecutableFlow> getExecutableFlows(Project project, String flowId, int skip, int size) throws ExecutorManagerException {
 		List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(project.getId(), flowId, skip, size);
 		return flows;
 	}
 	
+	@Override
 	public List<ExecutableFlow> getExecutableFlows(int skip, int size) throws ExecutorManagerException {
 		List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(skip, size);
 		return flows;
 	}
 	
+	@Override
 	public List<ExecutableFlow> getExecutableFlows(String flowIdContains, int skip, int size) throws ExecutorManagerException {
 		List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(null, '%'+flowIdContains+'%', null, 0, -1, -1 , skip, size);
 		return flows;
 	}
 
+	@Override
 	public List<ExecutableFlow> getExecutableFlows(String projContain, String flowContain, String userContain, int status, long begin, long end, int skip, int size) throws ExecutorManagerException {
 		List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(projContain, flowContain, userContain, status, begin, end , skip, size);
 		return flows;
 	}
 	
+	@Override
 	public List<ExecutableJobInfo> getExecutableJobs(Project project, String jobId, int skip, int size) throws ExecutorManagerException {
 		List<ExecutableJobInfo> nodes = executorLoader.fetchJobHistory(project.getId(), jobId, skip, size);
 		return nodes;
 	}
 	
+	@Override
 	public int getNumberOfJobExecutions(Project project, String jobId) throws ExecutorManagerException{
 		return executorLoader.fetchNumExecutableNodes(project.getId(), jobId);
 	}
 	
+	@Override
 	public int getNumberOfExecutions(Project project, String flowId) throws ExecutorManagerException{
 		return executorLoader.fetchNumExecutableFlows(project.getId(), flowId);
 	}
 	
+	@Override
 	public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset, int length) throws ExecutorManagerException {
 		Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
 		if (pair != null) {
@@ -244,10 +254,10 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public LogData getExecutionJobLog(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException {
 		Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
 		if (pair != null) {
-
 			Pair<String,String> typeParam = new Pair<String,String>("type", "job");
 			Pair<String,String> jobIdParam = new Pair<String,String>("jobId", jobId);
 			Pair<String,String> offsetParam = new Pair<String,String>("offset", String.valueOf(offset));
@@ -264,6 +274,7 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException {
 		Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
 		if (pair != null) {
@@ -283,6 +294,7 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public void cancelFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
 		synchronized(exFlow) {
 			Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
@@ -293,6 +305,7 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public void resumeFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
 		synchronized(exFlow) {
 			Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
@@ -303,6 +316,7 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public void pauseFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
 		synchronized(exFlow) {
 			Pair<ExecutionReference, ExecutableFlow> pair = runningFlows.get(exFlow.getExecutionId());
@@ -313,30 +327,37 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public void pauseExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_PAUSE_JOBS, userId, jobIds);
 	}
 	
+	@Override
 	public void resumeExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RESUME_JOBS, userId, jobIds);
 	}
 	
+	@Override
 	public void retryFailures(ExecutableFlow exFlow, String userId) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_FAILURES, userId);
 	}
 	
+	@Override
 	public void retryExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_RETRY_JOBS, userId, jobIds);
 	}
 	
+	@Override
 	public void disableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_DISABLE_JOBS, userId, jobIds);
 	}
 	
+	@Override
 	public void enableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_ENABLE_JOBS, userId, jobIds);
 	}
 	
+	@Override
 	public void cancelExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException {
 		modifyExecutingJobs(exFlow, ConnectorParams.MODIFY_CANCEL_JOBS, userId, jobIds);
 	}
@@ -379,9 +400,10 @@ public class ExecutorManager {
 		}
 	}
 	
-	public String submitExecutableFlow(ExecutableFlow exflow) throws ExecutorManagerException {
+	@Override
+	public String submitExecutableFlow(ExecutableFlow exflow, String userId) throws ExecutorManagerException {
 		synchronized(exflow) {
-			logger.info("Submitting execution flow " + exflow.getFlowId());
+			logger.info("Submitting execution flow " + exflow.getFlowId() + " by " + userId);
 
 			int projectId = exflow.getProjectId();
 			String flowId = exflow.getFlowId();
@@ -441,7 +463,7 @@ public class ExecutorManager {
 	}
 	
 	
-	public void cleanOldExecutionLogs(long millis) {
+	private void cleanOldExecutionLogs(long millis) {
 		try {
 			int count = executorLoader.removeExecutionLogsByTime(millis);
 			logger.info("Cleaned up " + count + " log entries.");
@@ -537,6 +559,7 @@ public class ExecutorManager {
 		return jsonResponse;
 	}
 	
+	@Override
 	public Map<String, Object> callExecutorJMX(String hostPort, String action, String mBean) throws IOException {
 		URIBuilder builder = new URIBuilder();
 		
@@ -581,6 +604,7 @@ public class ExecutorManager {
 		return jsonResponse;
 	}
 	
+	@Override
 	public void shutdown() {
 		executingManager.shutdown();
 	}
@@ -610,7 +634,9 @@ public class ExecutorManager {
 			while(!shutdown) {
 				try {
 					lastThreadCheckTime = System.currentTimeMillis();
-					
+
+//					loadRunningFlows();
+
 					updaterStage = "Starting update all flows.";
 					
 					Map<ConnectionInfo, List<ExecutableFlow>> exFlowMap = getFlowToExecutorMap();
@@ -730,6 +756,7 @@ public class ExecutorManager {
 	}
 	
 	private void finalizeFlows(ExecutableFlow flow) {
+
 		int execId = flow.getExecutionId();
 		
 		updaterStage = "finalizing flow " + execId;
@@ -752,6 +779,7 @@ public class ExecutorManager {
 			}
 
 			updaterStage = "finalizing flow " + execId + " deleting active reference";
+			
 			// Delete the executing reference.
 			if (flow.getEndTime() == -1) {
 				flow.setEndTime(System.currentTimeMillis());
@@ -762,6 +790,7 @@ public class ExecutorManager {
 			updaterStage = "finalizing flow " + execId + " cleaning from memory";
 			runningFlows.remove(execId);
 			recentlyFinished.put(execId, dsFlow);
+
 		} catch (ExecutorManagerException e) {
 			logger.error(e);
 		}
@@ -772,27 +801,61 @@ public class ExecutorManager {
 		updaterStage = "finalizing flow " + execId + " alerting and emailing";
 		ExecutionOptions options = flow.getExecutionOptions();
 		// But we can definitely email them.
+		Alerter mailAlerter = alerters.get("email");
 		if(flow.getStatus() == Status.FAILED || flow.getStatus() == Status.KILLED)
 		{
 			if(options.getFailureEmails() != null && !options.getFailureEmails().isEmpty())
 			{
 				try {
-					mailer.sendErrorEmail(flow, "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
+					mailAlerter.alertOnError(flow, "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
 				} catch (Exception e) {
 					logger.error(e);
 				}
 			}
+			if(options.getFlowParameters().containsKey("alert.type")) {
+				String alertType = options.getFlowParameters().get("alert.type");
+				Alerter alerter = alerters.get(alertType);
+				if(alerter != null) {
+					try {
+						alerter.alertOnError(flow, "Executor no longer seems to be running this execution. Most likely due to executor bounce.");
+					} catch (Exception e) {
+						// TODO Auto-generated catch block
+						e.printStackTrace();
+						logger.error("Failed to alert by " + alertType);
+					}
+				}
+				else {
+					logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
+				}
+			}
 		}
 		else
 		{
 			if(options.getSuccessEmails() != null && !options.getSuccessEmails().isEmpty())
 			{
 				try {
-					mailer.sendSuccessEmail(flow);
+					
+					mailAlerter.alertOnSuccess(flow);
 				} catch (Exception e) {
 					logger.error(e);
 				}
 			}
+			if(options.getFlowParameters().containsKey("alert.type")) {
+				String alertType = options.getFlowParameters().get("alert.type");
+				Alerter alerter = alerters.get(alertType);
+				if(alerter != null) {
+					try {
+						alerter.alertOnSuccess(flow);
+					} catch (Exception e) {
+						// TODO Auto-generated catch block
+						e.printStackTrace();
+						logger.error("Failed to alert by " + alertType);
+					}
+				}
+				else {
+					logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
+				}
+			}
 		}
 		
 	}
@@ -845,6 +908,7 @@ public class ExecutorManager {
 	}
 	
 	private ExecutableFlow updateExecution(Map<String,Object> updateData) throws ExecutorManagerException {
+		
 		Integer execId = (Integer)updateData.get(ConnectorParams.UPDATE_MAP_EXEC_ID);
 		if (execId == null) {
 			throw new ExecutorManagerException("Response is malformed. Need exec id to update.");
@@ -873,7 +937,29 @@ public class ExecutorManager {
 		if (oldStatus != newStatus && newStatus.equals(Status.FAILED_FINISHING)) {
 			// We want to see if we should give an email status on first failure.
 			if (options.getNotifyOnFirstFailure()) {
-				mailer.sendFirstErrorMessage(flow);
+				Alerter mailAlerter = alerters.get("email");
+				try {
+					mailAlerter.alertOnFirstError(flow);
+				} catch (Exception e) {
+					e.printStackTrace();
+					logger.error("Failed to send first error email." + e.getMessage());
+				}
+			}
+			if(options.getFlowParameters().containsKey("alert.type")) {
+				String alertType = options.getFlowParameters().get("alert.type");
+				Alerter alerter = alerters.get(alertType);
+				if(alerter != null) {
+					try {
+						alerter.alertOnFirstError(flow);
+					} catch (Exception e) {
+						// TODO Auto-generated catch block
+						e.printStackTrace();
+						logger.error("Failed to alert by " + alertType);
+					}
+				}
+				else {
+					logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
+				}
 			}
 		}
 		
@@ -983,12 +1069,14 @@ public class ExecutorManager {
 		}
 	}
 	
+	@Override
 	public int getExecutableFlows(int projectId, String flowId, int from, int length, List<ExecutableFlow> outputList) throws ExecutorManagerException {
 		List<ExecutableFlow> flows = executorLoader.fetchFlowHistory(projectId, flowId, from, length);
 		outputList.addAll(flows);
 		return executorLoader.fetchNumExecutableFlows(projectId, flowId);
 	}
 
+	@Override
 	public List<ExecutableFlow> getExecutableFlows(int projectId, String flowId, int from, int length, Status status) throws ExecutorManagerException {
 		return executorLoader.fetchFlowHistory(projectId, flowId, from, length, status);
 	}
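
Note (editorial): the server-side wiring of the new ExecutorManager(Props, ExecutorLoader, Map<String, Alerter>) constructor is not shown in this diff. A hedged sketch of how a caller might build the alerter map, using the "email" key that finalizeFlows() and updateExecution() look up; the helper class and method names are made up:

import java.util.HashMap;
import java.util.Map;

import azkaban.alert.Alerter;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManager;
import azkaban.executor.ExecutorManagerException;
import azkaban.utils.Props;

public class ExecutorManagerWiringExample {
	// Hypothetical wiring; the real web server builds this map from its own config/plugins.
	public static ExecutorManager create(Props props, ExecutorLoader loader, Alerter emailAlerter)
			throws ExecutorManagerException {
		Map<String, Alerter> alerters = new HashMap<String, Alerter>();
		// "email" is the key ExecutorManager expects for the mail alerter.
		alerters.put("email", emailAlerter);
		return new ExecutorManager(props, loader, alerters);
	}
}
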
diff --git a/src/java/azkaban/executor/ExecutorManagerAdapter.java b/src/java/azkaban/executor/ExecutorManagerAdapter.java
new file mode 100644
index 0000000..85b55df
--- /dev/null
+++ b/src/java/azkaban/executor/ExecutorManagerAdapter.java
@@ -0,0 +1,132 @@
+package azkaban.executor;
+
+import java.io.IOException;
+import java.lang.Thread.State;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import azkaban.project.Project;
+import azkaban.utils.FileIOUtils.JobMetaData;
+import azkaban.utils.FileIOUtils.LogData;
+
+public interface ExecutorManagerAdapter {
+	
+	public static final String LOCAL_MODE = "local";
+	public static final String REMOTE_MODE = "remote";
+	
+	public static final String REMOTE_EXECUTOR_MANAGER_HOST = "remote.executor.manager.host";
+	public static final String REMOTE_EXECUTOR_MANAGER_PORT = "remote.executor.manager.port";
+	public static final String REMOTE_EXECUTOR_MANAGER_URL = "/executormanager";
+	
+	public static final String ACTION_GET_FLOW_LOG = "getFlowLog";
+	public static final String ACTION_GET_JOB_LOG = "getJobLog";
+	public static final String ACTION_CANCEL_FLOW = "cancelFlow";
+	public static final String ACTION_SUBMIT_FLOW = "submitFlow";
+	public static final String ACTION_RESUME_FLOW = "resumeFlow";
+	public static final String ACTION_PAUSE_FLOW = "pauseFlow";
+	public static final String ACTION_MODIFY_EXECUTION = "modifyExecution"; 
+	public static final String ACTION_UPDATE = "update";
+	public static final String ACTION_GET_JMX = "getJMX";
+	
+	public static final String COMMAND_MODIFY_PAUSE_JOBS = "modifyPauseJobs";
+	public static final String COMMAND_MODIFY_RESUME_JOBS = "modifyResumeJobs";
+	public static final String COMMAND_MODIFY_RETRY_FAILURES = "modifyRetryFailures";
+	public static final String COMMAND_MODIFY_RETRY_JOBS = "modifyRetryJobs";
+	public static final String COMMAND_MODIFY_DISABLE_JOBS = "modifyDisableJobs";
+	public static final String COMMAND_MODIFY_ENABLE_JOBS = "modifyEnableJobs";
+	public static final String COMMAND_MODIFY_CANCEL_JOBS = "modifyCancelJobs";
+	
+	public static final String INFO_JMX_TYPE = "jmxType";
+	public static final String INFO_JMX_DATA = "jmxData";
+	public static final String INFO_ACTION = "action";
+	public static final String INFO_TYPE = "type";
+	public static final String INFO_EXEC_ID = "execId";
+	public static final String INFO_EXEC_FLOW_JSON = "execFlowJson";
+	public static final String INFO_PROJECT_ID = "projectId";
+	public static final String INFO_FLOW_NAME = "flowName";
+	public static final String INFO_JOB_NAME = "jobName";
+	public static final String INFO_OFFSET = "offset";
+	public static final String INFO_LENGTH = "length";
+	public static final String INFO_ATTEMPT = "attempt";
+	public static final String INFO_MODIFY_JOB_IDS = "modifyJobIds";
+	public static final String INFO_MODIFY_COMMAND = "modifyCommand";
+	public static final String INFO_MESSAGE = "message";
+	public static final String INFO_ERROR = "error";
+	public static final String INFO_UPDATE_TIME_LIST = "updateTimeList";
+	public static final String INFO_EXEC_ID_LIST = "execIdList";
+	public static final String INFO_UPDATES = "updates";
+	public static final String INFO_USER_ID = "userId";
+	public static final String INFO_LOG = "logData";
+	
+	public boolean isFlowRunning(int projectId, String flowId);
+	
+	public ExecutableFlow getExecutableFlow(int execId) throws ExecutorManagerException;
+	
+	public List<Integer> getRunningFlows(int projectId, String flowId);
+	
+	public List<ExecutableFlow> getRunningFlows() throws IOException;
+	
+	public List<ExecutableFlow> getRecentlyFinishedFlows();
+	
+	public List<ExecutableFlow> getExecutableFlows(Project project, String flowId, int skip, int size) throws ExecutorManagerException;
+	
+	public List<ExecutableFlow> getExecutableFlows(int skip, int size) throws ExecutorManagerException;
+	
+	public List<ExecutableFlow> getExecutableFlows(String flowIdContains, int skip, int size) throws ExecutorManagerException;
+	
+	public List<ExecutableFlow> getExecutableFlows(String projContain, String flowContain, String userContain, int status, long begin, long end, int skip, int size) throws ExecutorManagerException;
+
+	public int getExecutableFlows(int projectId, String flowId, int from, int length, List<ExecutableFlow> outputList) throws ExecutorManagerException;
+
+	public List<ExecutableFlow> getExecutableFlows(int projectId, String flowId, int from, int length, Status status) throws ExecutorManagerException;
+
+	public List<ExecutableJobInfo> getExecutableJobs(Project project, String jobId, int skip, int size) throws ExecutorManagerException;
+	
+	public int getNumberOfJobExecutions(Project project, String jobId) throws ExecutorManagerException;
+	
+	public int getNumberOfExecutions(Project project, String flowId) throws ExecutorManagerException;
+	
+	public LogData getExecutableFlowLog(ExecutableFlow exFlow, int offset, int length) throws ExecutorManagerException;
+	
+	public LogData getExecutionJobLog(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException;
+	
+	public JobMetaData getExecutionJobMetaData(ExecutableFlow exFlow, String jobId, int offset, int length, int attempt) throws ExecutorManagerException;
+	
+	public void cancelFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
+	
+	public void resumeFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
+	
+	public void pauseFlow(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
+	
+	public void pauseExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+	
+	public void resumeExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+	
+	public void retryFailures(ExecutableFlow exFlow, String userId) throws ExecutorManagerException;
+	
+	public void retryExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+	
+	public void disableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+	
+	public void enableExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+	
+	public void cancelExecutingJobs(ExecutableFlow exFlow, String userId, String ... jobIds) throws ExecutorManagerException;
+
+	public String submitExecutableFlow(ExecutableFlow exflow, String userId) throws ExecutorManagerException;
+	
+	public Map<String, Object> callExecutorJMX(String hostPort, String action, String mBean) throws IOException;
+
+	public void shutdown();
+
+	public Set<String> getAllActiveExecutorServerHosts();
+
+	public State getExecutorManagerThreadState();
+
+	public boolean isExecutorManagerThreadActive();
+
+	public long getLastExecutorManagerThreadCheckTime();
+
+	public Set<? extends String> getPrimaryServerHosts();
+	
+}
diff --git a/src/java/azkaban/executor/ExecutorManagerServlet.java b/src/java/azkaban/executor/ExecutorManagerServlet.java
new file mode 100644
index 0000000..ab8d5b2
--- /dev/null
+++ b/src/java/azkaban/executor/ExecutorManagerServlet.java
@@ -0,0 +1,225 @@
+package azkaban.executor;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.log4j.Logger;
+
+import azkaban.utils.FileIOUtils.LogData;
+import azkaban.utils.JSONUtils;
+import azkaban.webapp.servlet.AbstractServiceServlet;
+
+
+public class ExecutorManagerServlet extends AbstractServiceServlet {
+	private final ExecutorManagerAdapter executorManager;
+	
+	public static final String URL = "executorManager";
+	private static final long serialVersionUID = 1L;
+	private static final Logger logger = Logger.getLogger(ExecutorManagerServlet.class);
+	
+	public ExecutorManagerServlet(ExecutorManagerAdapter executorManager) {
+		this.executorManager = executorManager;
+	}
+	
+	@Override
+	public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+		HashMap<String,Object> respMap= new HashMap<String,Object>();
+		//logger.info("ExecutorServer called by " + req.getRemoteAddr());
+		try {
+			if (!hasParam(req, ExecutorManagerAdapter.INFO_ACTION)) {
+				logger.error("Parameter action not set");
+				respMap.put("error", "Parameter action not set");
+			}
+			else {
+				String action = getParam(req, ExecutorManagerAdapter.INFO_ACTION);
+				if (action.equals(ExecutorManagerAdapter.ACTION_UPDATE)) {
+					//logger.info("Updated called");
+					handleAjaxUpdateRequest(req, respMap);
+				}
+				else {
+					int execid = Integer.parseInt(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID));
+					String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID, null);
+					
+					logger.info("User " + user + " has called action " + action + " on " + execid);
+					if (action.equals(ExecutorManagerAdapter.ACTION_GET_FLOW_LOG)) { 
+						handleFetchFlowLogEvent(execid, req, resp, respMap);
+					} else if (action.equals(ExecutorManagerAdapter.ACTION_GET_JOB_LOG)) {
+						handleFetchJobLogEvent(execid, req, resp, respMap);
+					}
+					else if (action.equals(ExecutorManagerAdapter.ACTION_SUBMIT_FLOW)) {
+						handleAjaxSubmitFlow(req, respMap, execid);
+					}
+					else if (action.equals(ExecutorManagerAdapter.ACTION_CANCEL_FLOW)) {
+						logger.info("Cancel called.");
+						handleAjaxCancelFlow(respMap, execid, user);
+					}
+					else if (action.equals(ExecutorManagerAdapter.ACTION_PAUSE_FLOW)) {
+						logger.info("Paused called.");
+						handleAjaxPauseFlow(respMap, execid, user);
+					}
+					else if (action.equals(ExecutorManagerAdapter.ACTION_RESUME_FLOW)) {
+						logger.info("Resume called.");
+						handleAjaxResumeFlow(respMap, execid, user);
+					}
+					else if (action.equals(ExecutorManagerAdapter.ACTION_MODIFY_EXECUTION)) {
+						logger.info("Modify Execution Action");
+						handleModifyExecution(respMap, execid, user, req);
+					}
+					else {
+						logger.error("action: '" + action + "' not supported.");
+						respMap.put("error", "action: '" + action + "' not supported.");
+					}
+				}
+			}
+		} catch (Exception e) {
+			logger.error(e);
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e.getMessage());
+		}
+		writeJSON(resp, respMap);
+		resp.flushBuffer();
+	}
+
+	private void handleModifyExecution(HashMap<String, Object> respMap,
+			int execid, String user, HttpServletRequest req) {
+		if (!hasParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND)) {
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, "Modification command not set.");
+			return;
+		}
+
+		try {
+			String modificationType = getParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND);
+			ExecutableFlow exflow = executorManager.getExecutableFlow(execid);
+			if (ExecutorManagerAdapter.COMMAND_MODIFY_RETRY_FAILURES.equals(modificationType)) {
+				executorManager.retryFailures(exflow, user);
+			}
+			else {
+//				String modifiedJobList = getParam(req, MODIFY_JOBS_LIST);
+//				String[] jobIds = modifiedJobList.split("\\s*,\\s*");
+//				
+//				if (MODIFY_RETRY_JOBS.equals(modificationType)) {
+//				}
+//				else if (MODIFY_CANCEL_JOBS.equals(modificationType)) {
+//				}
+//				else if (MODIFY_DISABLE_JOBS.equals(modificationType)) {
+//				}
+//				else if (MODIFY_ENABLE_JOBS.equals(modificationType)) {
+//				}
+//				else if (MODIFY_PAUSE_JOBS.equals(modificationType)) {
+//				}
+//				else if (MODIFY_RESUME_JOBS.equals(modificationType)) {
+//				}
+			}
+		} catch (Exception e) {
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+	}
+
+	private void handleAjaxResumeFlow(HashMap<String, Object> respMap, int execid, String user) {
+		try {
+			ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+			executorManager.resumeFlow(exFlow, user);
+		} catch (Exception e) {
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+		
+	}
+
+	private void handleAjaxPauseFlow(HashMap<String, Object> respMap, int execid, String user) {
+		try {
+			ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+			executorManager.pauseFlow(exFlow, user);
+		} catch (Exception e) {
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+	}
+
+	private void handleAjaxCancelFlow(HashMap<String, Object> respMap, int execid, String user) {
+		try {
+			ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+			executorManager.cancelFlow(exFlow, user);
+		} catch (Exception e) {
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+	}
+
+	private void handleAjaxSubmitFlow(HttpServletRequest req, HashMap<String, Object> respMap, int execid) {
+		try{
+			String execFlowJson = getParam(req, ExecutorManagerAdapter.INFO_EXEC_FLOW_JSON);
+			ExecutableFlow exflow = ExecutableFlow.createExecutableFlowFromObject(JSONUtils.parseJSONFromString(execFlowJson));
+			String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID);
+			executorManager.submitExecutableFlow(exflow, user);
+			respMap.put(ExecutorManagerAdapter.INFO_EXEC_ID, exflow.getExecutionId());
+		} catch (Exception e) {
+			e.printStackTrace();
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+	}
+
+	private void handleFetchJobLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
+		try{
+			ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+			String jobId = getParam(req, ExecutorManagerAdapter.INFO_JOB_NAME);
+			int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+			int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+			int attempt = getIntParam(req, ExecutorManagerAdapter.INFO_ATTEMPT);
+			LogData log = executorManager.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
+			respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
+		}  catch (Exception e) {
+			e.printStackTrace();
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+	}
+
+	private void handleFetchFlowLogEvent(int execid, HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> respMap) {
+		try{
+			ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
+			int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+			int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+			LogData log = executorManager.getExecutableFlowLog(exFlow, offset, length);
+			respMap.put(ExecutorManagerAdapter.INFO_LOG, JSONUtils.toJSON(log.toObject()));
+		}  catch (Exception e) {
+			e.printStackTrace();
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}
+		
+	}
+
+	@SuppressWarnings("unchecked")
+	private void handleAjaxUpdateRequest(HttpServletRequest req, HashMap<String, Object> respMap) {
+		try {
+			ArrayList<Object> updateTimesList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_UPDATE_TIME_LIST));
+			ArrayList<Object> execIDList = (ArrayList<Object>)JSONUtils.parseJSONFromString(getParam(req, ExecutorManagerAdapter.INFO_EXEC_ID_LIST));
+			
+			ArrayList<Object> updateList = new ArrayList<Object>();
+			for (int i = 0; i < execIDList.size(); ++i) {
+				long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
+				int execId = (Integer)execIDList.get(i);
+				
+				ExecutableFlow flow = executorManager.getExecutableFlow(execId);
+				if (flow == null) {
+					Map<String, Object> errorResponse = new HashMap<String,Object>();
+					errorResponse.put(ExecutorManagerAdapter.INFO_ERROR, "Flow does not exist");
+					errorResponse.put(ExecutorManagerAdapter.INFO_EXEC_ID, execId);
+					updateList.add(errorResponse);
+					continue;
+				}
+				
+				if (flow.getUpdateTime() > updateTime) {
+					updateList.add(flow.toUpdateObject(updateTime));
+				}
+			}
+			
+			respMap.put(ExecutorManagerAdapter.INFO_UPDATES, updateList);
+		}  catch (Exception e) {
+			e.printStackTrace();
+			respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
+		}		
+	}
+	
+}
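
Note (editorial): a hedged sketch of a remote caller hitting the servlet's update action. The host, port, mount path prefix, and JSON-encoded id/time lists below are assumptions for illustration, not anything defined in this diff.

import java.util.Map;

import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;

import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerServlet;
import azkaban.utils.JSONUtils;

public class ExecutorManagerClientExample {
	@SuppressWarnings("unchecked")
	public static void main(String[] args) throws Exception {
		// Hypothetical host/port; ExecutorManagerServlet.URL is "executorManager".
		URIBuilder builder = new URIBuilder();
		builder.setScheme("http").setHost("localhost").setPort(8081);
		builder.setPath("/" + ExecutorManagerServlet.URL);
		builder.addParameter(ExecutorManagerAdapter.INFO_ACTION, ExecutorManagerAdapter.ACTION_UPDATE);
		builder.addParameter(ExecutorManagerAdapter.INFO_EXEC_ID_LIST, "[1]");
		builder.addParameter(ExecutorManagerAdapter.INFO_UPDATE_TIME_LIST, "[0]");

		HttpResponse response = new DefaultHttpClient().execute(new HttpGet(builder.build()));
		String body = EntityUtils.toString(response.getEntity());
		Map<String, Object> respMap = (Map<String, Object>) JSONUtils.parseJSONFromString(body);
		// "updates" holds the per-execution update objects built by handleAjaxUpdateRequest.
		System.out.println(respMap.get(ExecutorManagerAdapter.INFO_UPDATES));
	}
}
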
diff --git a/src/java/azkaban/executor/mail/DefaultMailCreator.java b/src/java/azkaban/executor/mail/DefaultMailCreator.java
index fcebcac..0802cae 100644
--- a/src/java/azkaban/executor/mail/DefaultMailCreator.java
+++ b/src/java/azkaban/executor/mail/DefaultMailCreator.java
@@ -22,8 +22,8 @@ import java.util.List;
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutionOptions.FailureAction;
-import azkaban.executor.ExecutorMailer;
 import azkaban.utils.EmailMessage;
+import azkaban.utils.Emailer;
 import azkaban.utils.Utils;
 
 public class DefaultMailCreator implements MailCreator {
@@ -83,7 +83,7 @@ public class DefaultMailCreator implements MailCreator {
 
 			message.println("");
 			message.println("<h3>Reason</h3>");
-			List<String> failedJobs = ExecutorMailer.findFailedJobs(flow);
+			List<String> failedJobs = Emailer.findFailedJobs(flow);
 			message.println("<ul>");
 			for (String jobId : failedJobs) {
 				message.println("<li><a href=\"" + executionUrl + "&job=" + jobId + "\">Failed job '" + jobId + "' Link</a></li>");
@@ -121,7 +121,7 @@ public class DefaultMailCreator implements MailCreator {
 
 			message.println("");
 			message.println("<h3>Reason</h3>");
-			List<String> failedJobs = ExecutorMailer.findFailedJobs(flow);
+			List<String> failedJobs = Emailer.findFailedJobs(flow);
 			message.println("<ul>");
 			for (String jobId : failedJobs) {
 				message.println("<li><a href=\"" + executionUrl + "&job=" + jobId + "\">Failed job '" + jobId + "' Link</a></li>");
diff --git a/src/java/azkaban/jmx/JmxExecutorManager.java b/src/java/azkaban/jmx/JmxExecutorManager.java
index 37f52f8..e3acefb 100644
--- a/src/java/azkaban/jmx/JmxExecutorManager.java
+++ b/src/java/azkaban/jmx/JmxExecutorManager.java
@@ -19,7 +19,7 @@ public class JmxExecutorManager implements JmxExecutorManagerMBean {
 
 	@Override
 	public String getExecutorThreadState() {
-		return manager.getExecutorThreadState().toString();
+		return manager.getExecutorManagerThreadState().toString();
 	}
 	
 	@Override
@@ -29,12 +29,12 @@ public class JmxExecutorManager implements JmxExecutorManagerMBean {
 
 	@Override
 	public boolean isThreadActive() {
-		return manager.isThreadActive();
+		return manager.isExecutorManagerThreadActive();
 	}
 
 	@Override
 	public Long getLastThreadCheckTime() {
-		return manager.getLastThreadCheckTime();
+		return manager.getLastExecutorManagerThreadCheckTime();
 	}
 	
 	@Override 
diff --git a/src/java/azkaban/jmx/JmxExecutorManagerAdapter.java b/src/java/azkaban/jmx/JmxExecutorManagerAdapter.java
new file mode 100644
index 0000000..fc0f8cf
--- /dev/null
+++ b/src/java/azkaban/jmx/JmxExecutorManagerAdapter.java
@@ -0,0 +1,47 @@
+package azkaban.jmx;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import azkaban.executor.ExecutorManagerAdapter;
+
+public class JmxExecutorManagerAdapter implements JmxExecutorManagerAdapterMBean {
+	private ExecutorManagerAdapter manager;
+
+	public JmxExecutorManagerAdapter(ExecutorManagerAdapter manager) {
+		this.manager = manager;
+	}
+
+	@Override
+	public int getNumRunningFlows() {
+		try {
+			return this.manager.getRunningFlows().size();
+		} catch (IOException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+			return 0;
+		}
+	}
+
+	@Override
+	public String getExecutorManagerThreadState() {
+		return manager.getExecutorManagerThreadState().toString();
+	}
+
+	@Override
+	public boolean isExecutorManagerThreadActive() {
+		return manager.isExecutorManagerThreadActive();
+	}
+
+	@Override
+	public Long getLastExecutorManagerThreadCheckTime() {
+		return manager.getLastExecutorManagerThreadCheckTime();
+	}
+	
+	@Override 
+	public List<String> getPrimaryExecutorHostPorts() {
+		return new ArrayList<String>(manager.getPrimaryServerHosts());
+	}
+
+}
diff --git a/src/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java b/src/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
new file mode 100644
index 0000000..197e721
--- /dev/null
+++ b/src/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
@@ -0,0 +1,26 @@
+package azkaban.jmx;
+
+import java.util.List;
+
+public interface JmxExecutorManagerAdapterMBean {
+	@DisplayName("OPERATION: getNumRunningFlows")
+	public int getNumRunningFlows();
+	
+	@DisplayName("OPERATION: getExecutorThreadState")
+	public String getExecutorManagerThreadState();
+
+	@DisplayName("OPERATION: isThreadActive")
+	public boolean isExecutorManagerThreadActive();
+
+	@DisplayName("OPERATION: getLastThreadCheckTime")
+	public Long getLastExecutorManagerThreadCheckTime();
+
+	@DisplayName("OPERATION: getPrimaryExecutorHostPorts")
+	public List<String> getPrimaryExecutorHostPorts();
+	
+//	@DisplayName("OPERATION: getExecutorThreadStage")
+//	public String getExecutorThreadStage();
+//	
+//	@DisplayName("OPERATION: getRunningFlows")
+//	public String getRunningFlows();
+}
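
Note (editorial): registration of the new MBean is not part of this diff. A standard-JMX sketch of how such an MBean is typically exposed on the platform MBean server; the object name below is made up:

import java.lang.management.ManagementFactory;

import javax.management.MBeanServer;
import javax.management.ObjectName;

import azkaban.executor.ExecutorManagerAdapter;
import azkaban.jmx.JmxExecutorManagerAdapter;

public class JmxRegistrationExample {
	public static void registerExecutorManagerMBean(ExecutorManagerAdapter manager) throws Exception {
		MBeanServer server = ManagementFactory.getPlatformMBeanServer();
		// Hypothetical object name; Azkaban's server uses its own naming scheme.
		ObjectName name = new ObjectName("azkaban.jmx:type=JmxExecutorManagerAdapter");
		// Works as a standard MBean because the class implements
		// JmxExecutorManagerAdapterMBean (the ClassName + "MBean" convention).
		server.registerMBean(new JmxExecutorManagerAdapter(manager), name);
	}
}
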
diff --git a/src/java/azkaban/jmx/JmxTriggerManager.java b/src/java/azkaban/jmx/JmxTriggerManager.java
new file mode 100644
index 0000000..7537b0b
--- /dev/null
+++ b/src/java/azkaban/jmx/JmxTriggerManager.java
@@ -0,0 +1,63 @@
+package azkaban.jmx;
+
+import org.joda.time.DateTime;
+
+import azkaban.trigger.TriggerManagerAdapter;
+import azkaban.trigger.TriggerManagerAdapter.TriggerJMX;
+
+public class JmxTriggerManager implements JmxTriggerManagerMBean {
+	private TriggerJMX jmxStats;
+
+	public JmxTriggerManager(TriggerManagerAdapter manager) {
+		this.jmxStats = manager.getJMX();
+	}
+
+	@Override
+	public String getLastRunnerThreadCheckTime() {
+		return new DateTime(jmxStats.getLastRunnerThreadCheckTime()).toString();
+	}
+
+	@Override
+	public boolean isRunnerThreadActive() {
+		return jmxStats.isRunnerThreadActive();
+	}
+
+	@Override
+	public String getPrimaryTriggerHostPort() {
+		return jmxStats.getPrimaryServerHost();
+	}
+
+//	@Override
+//	public List<String> getAllTriggerHostPorts() {
+//		return new ArrayList<String>(manager.getAllActiveTriggerServerHosts());
+//	}
+
+	@Override
+	public int getNumTriggers() {
+		return jmxStats.getNumTriggers();
+	}
+
+	@Override
+	public String getTriggerSources() {
+		return jmxStats.getTriggerSources();
+	}
+
+	@Override
+	public String getTriggerIds() {
+		return jmxStats.getTriggerIds();
+	}
+
+	@Override
+	public long getScannerIdleTime() {
+		return jmxStats.getScannerIdleTime();
+	}
+
+	@Override
+	public String getScannerThreadStage() {
+		// TODO Auto-generated method stub
+		return jmxStats.getScannerThreadStage();
+	}
+	
+	
+	
+}
diff --git a/src/java/azkaban/jmx/JmxTriggerManagerMBean.java b/src/java/azkaban/jmx/JmxTriggerManagerMBean.java
new file mode 100644
index 0000000..c87fbd0
--- /dev/null
+++ b/src/java/azkaban/jmx/JmxTriggerManagerMBean.java
@@ -0,0 +1,31 @@
+package azkaban.jmx;
+
+public interface JmxTriggerManagerMBean {	
+	
+	@DisplayName("OPERATION: getLastThreadCheckTime")
+	public String getLastRunnerThreadCheckTime();
+
+	@DisplayName("OPERATION: isThreadActive")
+	public boolean isRunnerThreadActive();
+
+	@DisplayName("OPERATION: getPrimaryTriggerHostPort")
+	public String getPrimaryTriggerHostPort();
+	
+//	@DisplayName("OPERATION: getAllTriggerHostPorts")
+//	public List<String> getAllTriggerHostPorts();
+	
+	@DisplayName("OPERATION: getNumTriggers")
+	public int getNumTriggers();
+	
+	@DisplayName("OPERATION: getTriggerSources")
+	public String getTriggerSources();
+	
+	@DisplayName("OPERATION: getTriggerIds")
+	public String getTriggerIds();
+	
+	@DisplayName("OPERATION: getScannerIdleTime")
+	public long getScannerIdleTime();
+	
+	@DisplayName("OPERATION: getScannerThreadStage")
+	public String getScannerThreadStage();
+}
diff --git a/src/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java b/src/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
new file mode 100644
index 0000000..ca0f45b
--- /dev/null
+++ b/src/java/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
@@ -0,0 +1,23 @@
+package azkaban.jmx;
+
+public interface JmxTriggerRunnerManagerMBean {
+
+	@DisplayName("OPERATION: getLastRunnerThreadCheckTime")
+	public long getLastRunnerThreadCheckTime();
+
+	@DisplayName("OPERATION: getNumTriggers")
+	public int getNumTriggers();
+	
+	@DisplayName("OPERATION: isRunnerThreadActive")
+	public boolean isRunnerThreadActive();
+	
+	@DisplayName("OPERATION: getTriggerSources")
+	public String getTriggerSources();
+	
+	@DisplayName("OPERATION: getTriggerIds")
+	public String getTriggerIds();
+	
+	@DisplayName("OPERATION: getScannerIdleTime")
+	public long getScannerIdleTime();
+	
+}
diff --git a/src/java/azkaban/jobExecutor/NoopJob.java b/src/java/azkaban/jobExecutor/NoopJob.java
index d53e607..c16844d 100644
--- a/src/java/azkaban/jobExecutor/NoopJob.java
+++ b/src/java/azkaban/jobExecutor/NoopJob.java
@@ -26,7 +26,7 @@ import azkaban.utils.Props;
 public class NoopJob implements Job {
 	private String jobId;
 
-	public NoopJob(String jobid, Props props, Logger log) {
+	public NoopJob(String jobid, Props props, Props jobProps, Logger log) {
 		this.jobId = jobid;
 	}
 
diff --git a/src/java/azkaban/jobtype/JobTypeManager.java b/src/java/azkaban/jobtype/JobTypeManager.java
index 96e4bf9..dfde7a3 100644
--- a/src/java/azkaban/jobtype/JobTypeManager.java
+++ b/src/java/azkaban/jobtype/JobTypeManager.java
@@ -131,19 +131,33 @@ public class JobTypeManager
 			throw new JobTypeManagerException("Failed to get global jobtype properties" + e.getCause());
 		}
 		
-		for(File dir : jobPluginsDir.listFiles()) {
-			if(dir.isDirectory() && dir.canRead()) {
-				// get its conf file
-				try {
-					loadJob(dir, globalConf, globalSysConf);
-				}
-				catch (Exception e) {
-					logger.error("Failed to load jobtype " + dir.getName() + e.getMessage());
-					throw new JobTypeManagerException(e);
+		
+		synchronized (this) {
+			ClassLoader prevCl = Thread.currentThread().getContextClassLoader();
+			try{
+				for(File dir : jobPluginsDir.listFiles()) {
+					if(dir.isDirectory() && dir.canRead()) {
+						// get its conf file
+						try {
+							loadJob(dir, globalConf, globalSysConf);
+							Thread.currentThread().setContextClassLoader(prevCl);
+						}
+						catch (Exception e) {
+							logger.error("Failed to load jobtype " + dir.getName() + e.getMessage());
+							throw new JobTypeManagerException(e);
+						}
+					}
 				}
+			} catch(Exception e) {
+				e.printStackTrace();
+				throw new JobTypeManagerException(e);
+			} catch(Throwable t) {
+				t.printStackTrace();
+				throw new JobTypeManagerException(t);
+			} finally {
+				Thread.currentThread().setContextClassLoader(prevCl);
 			}
-		}
-
+ 		}
 	}
 	
 	public static File findFilefromDir(File dir, String fn){
@@ -214,7 +228,6 @@ public class JobTypeManager
 		try {
 			if(confFile != null) {
 				conf = new Props(commonConf, confFile);
-//				conf = PropsUtils.resolveProps(conf);
 			}
 			else {
 				conf = new Props(commonConf);
@@ -237,18 +250,56 @@ public class JobTypeManager
 		logger.info("Loading jobtype " + jobtypeName );
 
 		// sysconf says what jars/confs to load
-		//List<String> jobtypeClasspath = sysConf.getStringList("jobtype.classpath", null, ",");
 		List<URL> resources = new ArrayList<URL>();		
-		for(File f : dir.listFiles()) {
-			try {
+		
+		try {
+			//first global classpath
+			logger.info("Adding global resources.");
+			List<String> typeGlobalClassPath = sysConf.getStringList("jobtype.global.classpath", null, ",");
+			if(typeGlobalClassPath != null) {
+				for(String jar : typeGlobalClassPath) {
+					URL cpItem = new File(jar).toURI().toURL();
+					if(!resources.contains(cpItem)) {
+						logger.info("adding to classpath " + cpItem);
+						resources.add(cpItem);
+					}
+				}
+			}
+			
+			//type specific classpath
+			logger.info("Adding type resources.");
+			List<String> typeClassPath = sysConf.getStringList("jobtype.classpath", null, ",");
+			if(typeClassPath != null) {
+				for(String jar : typeClassPath) {
+					URL cpItem = new File(jar).toURI().toURL();
+					if(!resources.contains(cpItem)) {
+						logger.info("adding to classpath " + cpItem);
+						resources.add(cpItem);
+					}
+				}
+			}			
+			List<String> jobtypeLibDirs = sysConf.getStringList("jobtype.lib.dir", null, ",");
+			if(jobtypeLibDirs != null) {
+				for(String libDir : jobtypeLibDirs) {
+					for(File f : new File(libDir).listFiles()) {
+						if(f.getName().endsWith(".jar")) {
+								resources.add(f.toURI().toURL());
+								logger.info("adding to classpath " + f.toURI().toURL());
+						}
+					}
+				}
+			}
+			
+			logger.info("Adding type override resources.");
+			for(File f : dir.listFiles()) {
 				if(f.getName().endsWith(".jar")) {
-					resources.add(f.toURI().toURL());
-					logger.info("adding to classpath " + f.toURI().toURL());
+						resources.add(f.toURI().toURL());
+						logger.info("adding to classpath " + f.toURI().toURL());
 				}
-			} catch (MalformedURLException e) {
-				// TODO Auto-generated catch block
-				throw new JobTypeManagerException(e);
 			}
+			
+		} catch (MalformedURLException e) {
+			throw new JobTypeManagerException(e);
 		}
 		
 		// each job type can have a different class loader
@@ -266,7 +317,7 @@ public class JobTypeManager
 		logger.info("Doing simple testing...");
 		try {
 			Props fakeSysProps = new Props(sysConf);
-			fakeSysProps.put("type", jobtypeName);
+//			fakeSysProps.put("type", jobtypeName);
 			Props fakeJobProps = new Props(conf);
 			@SuppressWarnings("unused")
 			Job job = (Job)Utils.callConstructor(clazz, "dummy", fakeSysProps, fakeJobProps, logger);
diff --git a/src/java/azkaban/project/ProjectManager.java b/src/java/azkaban/project/ProjectManager.java
index 98d4a36..9229fee 100644
--- a/src/java/azkaban/project/ProjectManager.java
+++ b/src/java/azkaban/project/ProjectManager.java
@@ -50,6 +50,8 @@ public class ProjectManager {
 	private final int projectVersionRetention;
 	private final boolean creatorDefaultPermissions;
 	
+	private boolean loadTriggerFromFile = false;
+	
 	public ProjectManager(ProjectLoader loader, Props props) {
 		this.projectLoader = loader;
 		this.props = props;
@@ -59,13 +61,19 @@ public class ProjectManager {
 		
 		this.creatorDefaultPermissions = props.getBoolean("creator.default.proxy", true);
 		
+		this.loadTriggerFromFile = props.getBoolean("enable.load.trigger.from.file", false);
+		
 		if (!tempDir.exists()) {
 			tempDir.mkdirs();
 		}
 		
 		loadAllProjects();
 	}
-	
+
+	public void setLoadTriggerFromFile(boolean enable) {
+		this.loadTriggerFromFile = enable;
+	}
+
 	private void loadAllProjects() {
 		List<Project> projects;
 		try {
@@ -351,6 +359,22 @@ public class ProjectManager {
 			logger.info("Uploading Props properties");
 			projectLoader.uploadProjectProperties(project, propProps);
 		}
+	
+		//TODO: find something else to load triggers
+//		if(loadTriggerFromFile) {
+//			logger.info("Loading triggers.");
+//			Props triggerProps = new Props();
+//			triggerProps.put("projectId", project.getId());
+//			triggerProps.put("projectName", project.getName());
+//			triggerProps.put("submitUser", uploader.getUserId());
+//			try {
+//				triggerManager.loadTriggerFromDir(file, triggerProps);
+//			} catch (Exception e) {
+//				// TODO Auto-generated catch block
+//				e.printStackTrace();
+//				logger.error("Failed to load triggers.", e);
+//			}
+//		}
 		
 		logger.info("Uploaded project files. Cleaning up temp files.");
 		projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(), "Uploaded project files zip " + archive.getName());
diff --git a/src/java/azkaban/scheduler/Schedule.java b/src/java/azkaban/scheduler/Schedule.java
index 1f103fa..32eb916 100644
--- a/src/java/azkaban/scheduler/Schedule.java
+++ b/src/java/azkaban/scheduler/Schedule.java
@@ -16,7 +16,9 @@
 
 package azkaban.scheduler;
 
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.joda.time.DateTime;
@@ -31,7 +33,7 @@ import org.joda.time.Seconds;
 import org.joda.time.Weeks;
 
 import azkaban.executor.ExecutionOptions;
-import azkaban.sla.SlaOptions;
+import azkaban.sla.SlaOption;
 import azkaban.utils.Pair;
 
 public class Schedule{
@@ -54,8 +56,10 @@ public class Schedule{
 	private String status;
 	private long submitTime;
 	
+	private boolean skipPastOccurrences = true;
+	
 	private ExecutionOptions executionOptions;
-	private SlaOptions slaOptions;
+	private List<SlaOption> slaOptions;
 	
 	public Schedule(
 						int scheduleId,
@@ -103,7 +107,7 @@ public class Schedule{
 						long submitTime,
 						String submitUser,
 						ExecutionOptions executionOptions,
-						SlaOptions slaOptions
+						List<SlaOption> slaOptions
 			) {
 		this(scheduleId, projectId, 
 				projectName, 
@@ -135,7 +139,7 @@ public class Schedule{
 						long submitTime,
 						String submitUser,
 						ExecutionOptions executionOptions,
-						SlaOptions slaOptions
+						List<SlaOption> slaOptions
 						) {
 		this.scheduleId = scheduleId;
 		this.projectId = projectId;
@@ -156,16 +160,16 @@ public class Schedule{
 	public ExecutionOptions getExecutionOptions() {
 		return executionOptions;
 	}
+	
+	public List<SlaOption> getSlaOptions() {
+		return slaOptions;
+	}
 
 	public void setFlowOptions(ExecutionOptions executionOptions) {
 		this.executionOptions = executionOptions;
 	}
-
-	public SlaOptions getSlaOptions() {
-		return slaOptions;
-	}
-
-	public void setSlaOptions(SlaOptions slaOptions) {
+	
+	public void setSlaOptions(List<SlaOption> slaOptions) {
 		this.slaOptions = slaOptions;
 	}
 
@@ -249,6 +253,10 @@ public class Schedule{
 		return false;
 	}
 	
+	public void setNextExecTime(long nextExecTime) {
+		this.nextExecTime = nextExecTime;
+	}
+	
 	private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone, ReadablePeriod period) {
 		DateTime now = new DateTime();
 		DateTime date = new DateTime(scheduleTime).withZone(timezone);
@@ -337,16 +345,21 @@ public class Schedule{
 		return periodStr;
 	}
 	
-	
 	public Map<String,Object> optionsToObject() {
-		if(executionOptions != null || slaOptions != null) {
+		if(executionOptions != null ) {
 			HashMap<String, Object> schedObj = new HashMap<String, Object>();
 			
 			if(executionOptions != null) {
 				schedObj.put("executionOptions", executionOptions.toObject());
 			}
+			
 			if(slaOptions != null) {
-				schedObj.put("slaOptions", slaOptions.toObject());
+				List<Object> slaOptionsObject = new ArrayList<Object>();
+//				schedObj.put("slaOptions", slaOptions.toObject());
+				for(SlaOption sla : slaOptions) {
+					slaOptionsObject.add(sla.toObject());
+				}
+				schedObj.put("slaOptions", slaOptionsObject);
 			}
 	
 			return schedObj;
@@ -354,8 +367,8 @@ public class Schedule{
 		return null;
 	}
 	
+	@SuppressWarnings("unchecked")
 	public void createAndSetScheduleOptions(Object obj) {
-		@SuppressWarnings("unchecked")
 		HashMap<String, Object> schedObj = (HashMap<String, Object>)obj;
 		if (schedObj.containsKey("executionOptions")) {
 			ExecutionOptions execOptions = ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
@@ -370,10 +383,25 @@ public class Schedule{
 			this.executionOptions = new ExecutionOptions();
 			this.executionOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
 		}
-
-		if (schedObj.containsKey("slaOptions")) {
-			SlaOptions slaOptions = SlaOptions.fromObject(schedObj.get("slaOptions"));
+		
+		if(schedObj.containsKey("slaOptions")) {
+			List<Object> slaOptionsObject = (List<Object>) schedObj.get("slaOptions");
+			List<SlaOption> slaOptions = new ArrayList<SlaOption>();
+			for(Object slaObj : slaOptionsObject) {
+				slaOptions.add(SlaOption.fromObject(slaObj));
+			}
 			this.slaOptions = slaOptions;
 		}
+		
+		
 	}
+
+	public boolean isRecurring() {
+		return period == null ? false : true;
+	}
+
+	public boolean skipPastOccurrences() {
+		return skipPastOccurrences;
+	}
+	
 }
diff --git a/src/java/azkaban/scheduler/ScheduleLoader.java b/src/java/azkaban/scheduler/ScheduleLoader.java
index 5856c18..e834cea 100644
--- a/src/java/azkaban/scheduler/ScheduleLoader.java
+++ b/src/java/azkaban/scheduler/ScheduleLoader.java
@@ -30,4 +30,5 @@ public interface ScheduleLoader {
 
 	public void updateNextExecTime(Schedule s) throws ScheduleManagerException;
 
+	public List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException;
 }
diff --git a/src/java/azkaban/scheduler/ScheduleManager.java b/src/java/azkaban/scheduler/ScheduleManager.java
index fededda..81c5c29 100644
--- a/src/java/azkaban/scheduler/ScheduleManager.java
+++ b/src/java/azkaban/scheduler/ScheduleManager.java
@@ -16,37 +16,23 @@
 
 package azkaban.scheduler;
 
-import java.lang.Thread.State;
 import java.util.ArrayList;
-import java.util.Comparator;
-import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.PriorityBlockingQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
 
 import org.apache.log4j.Logger;
-import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadablePeriod;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 
-import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
-import azkaban.executor.ExecutorManager;
-import azkaban.executor.ExecutorManagerException;
-import azkaban.flow.Flow;
-import azkaban.project.Project;
-import azkaban.project.ProjectManager;
-import azkaban.sla.SLA.SlaAction;
-import azkaban.sla.SLA.SlaRule;
-import azkaban.sla.SLA.SlaSetting;
-import azkaban.sla.SLAManager;
-import azkaban.sla.SlaOptions;
+import azkaban.sla.SlaOption;
+import azkaban.trigger.TriggerAgent;
+import azkaban.trigger.TriggerStatus;
 import azkaban.utils.Pair;
+import azkaban.utils.Props;
 
 /**
  * The ScheduleManager stores and executes the schedule. It uses a single thread
@@ -54,23 +40,29 @@ import azkaban.utils.Pair;
 * the flow from the schedule when it is run, which can potentially allow
 * successive runs of the flow to overlap each other.
  */
-public class ScheduleManager {
+public class ScheduleManager implements TriggerAgent {
 	private static Logger logger = Logger.getLogger(ScheduleManager.class);
 
+	public static final String triggerSource = "SimpleTimeTrigger";
 	private final DateTimeFormatter _dateFormat = DateTimeFormat.forPattern("MM-dd-yyyy HH:mm:ss:SSS");
 	private ScheduleLoader loader;
 
-	private Map<Pair<Integer, String>, Set<Schedule>> scheduleIdentityPairMap = new LinkedHashMap<Pair<Integer, String>, Set<Schedule>>();
+//	private Map<Pair<Integer, String>, Set<Schedule>> scheduleIdentityPairMap = new LinkedHashMap<Pair<Integer, String>, Set<Schedule>>();
 	private Map<Integer, Schedule> scheduleIDMap = new LinkedHashMap<Integer, Schedule>();
-	private final ScheduleRunner runner;
-	private final ExecutorManager executorManager;
-	private final ProjectManager projectManager;
-	private final SLAManager slaManager;
+	private Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap = new LinkedHashMap<Pair<Integer, String>, Schedule>();
 	
+//	private final ExecutorManagerAdapter executorManager;
+//	
+//	private ProjectManager projectManager = null;
+//	
 	// Used for mbeans to query Scheduler status
-	private long lastCheckTime = -1;
-	private long nextWakupTime = -1;
-	private String runnerStage = "not started";
+//	private long lastCheckTime = -1;
+//	private long nextWakupTime = -1;
+//	private String runnerStage = "not started";
 
 	/**
 	 * Give the schedule manager a loader class that will properly load the
@@ -78,17 +70,19 @@ public class ScheduleManager {
 	 * 
 	 * @param loader
 	 */
-	public ScheduleManager(ExecutorManager executorManager,
-							ProjectManager projectManager, 
-							SLAManager slaManager,
-							ScheduleLoader loader) 
+	public ScheduleManager (ScheduleLoader loader) 
 	{
-		this.executorManager = executorManager;
-		this.projectManager = projectManager;
-		this.slaManager = slaManager;
+//		this.executorManager = executorManager;
 		this.loader = loader;
-		this.runner = new ScheduleRunner();
-
+		
+	}
+	
+//	public void setProjectManager(ProjectManager projectManager) {
+//		this.projectManager = projectManager;
+//	}
+	
+	@Override
+	public void start() throws ScheduleManagerException {
 		List<Schedule> scheduleList = null;
 		try {
 			scheduleList = loader.loadSchedules();
@@ -99,10 +93,30 @@ public class ScheduleManager {
 		}
 
 		for (Schedule sched : scheduleList) {
-			internalSchedule(sched);
+			if(sched.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
+				onScheduleExpire(sched);
+			} else {
+				internalSchedule(sched);
+			}
 		}
 
-		this.runner.start();
+	}
+	
+	// only do this when using external runner
+	public synchronized void updateLocal() throws ScheduleManagerException {
+
+		List<Schedule> updates = loader.loadUpdatedSchedules();
+		for(Schedule s : updates) {
+			if(s.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
+				onScheduleExpire(s);
+			} else {
+				internalSchedule(s);
+			}
+		}
+	}
+	
+	private void onScheduleExpire(Schedule s) {
+		removeSchedule(s);
 	}
 
 	/**
@@ -110,16 +124,31 @@ public class ScheduleManager {
 	 * it again.
 	 */
 	public void shutdown() {
-		this.runner.shutdown();
+
 	}
 
 	/**
 	 * Retrieves a copy of the list of schedules.
 	 * 
 	 * @return
+	 * @throws ScheduleManagerException 
 	 */
-	public synchronized List<Schedule> getSchedules() {
-		return runner.getRunnerSchedules();
+	public synchronized List<Schedule> getSchedules() throws ScheduleManagerException {
+//		if(useExternalRunner) {
+//			for(Schedule s : scheduleIDMap.values()) {
+//				try {
+//					loader.updateNextExecTime(s);
+//				} catch (ScheduleManagerException e) {
+//					// TODO Auto-generated catch block
+//					e.printStackTrace();
+//					logger.error("Failed to update schedule from external runner for schedule " + s.getScheduleId());
+//				}
+//			}
+//		}
+		
+		//return runner.getRunnerSchedules();
+		updateLocal();
+		return new ArrayList<Schedule>(scheduleIDMap.values());
 	}
 
 	/**
@@ -127,18 +156,26 @@ public class ScheduleManager {
 	 * 
 	 * @param id
 	 * @return
+	 * @throws ScheduleManagerException 
 	*/
-	public Set<Schedule> getSchedules(int projectId, String flowId) {
-		return scheduleIdentityPairMap.get(new Pair<Integer,String>(projectId, flowId));
-	}
+//	public Set<Schedule> getSchedules(int projectId, String flowId) throws ScheduleManagerException {
+//		updateLocal();
+//		return scheduleIdentityPairMap.get(new Pair<Integer,String>(projectId, flowId));
+//	}
+	public Schedule getSchedule(int projectId, String flowId) throws ScheduleManagerException {
+		updateLocal();
+		return scheduleIdentityPairMap.get(new Pair<Integer,String>(projectId, flowId));
+	}
 
 	/**
 	 * Returns the scheduled flow for the scheduleId
 	 * 
 	 * @param id
 	 * @return
+	 * @throws ScheduleManagerException 
 	*/
-	public Schedule getSchedule(int scheduleId) {
+	public Schedule getSchedule(int scheduleId) throws ScheduleManagerException {
+		updateLocal();
 		return scheduleIDMap.get(scheduleId);
 	}
 
@@ -147,10 +184,19 @@ public class ScheduleManager {
 	 * Removes the flow from the schedule if it exists.
 	 * 
 	 * @param id
+	 * @throws ScheduleManagerException 
 	 */
-	public synchronized void removeSchedules(int projectId, String flowId) {
-		Set<Schedule> schedules = getSchedules(projectId, flowId);
-		for(Schedule sched : schedules) {
+//	public synchronized void removeSchedules(int projectId, String flowId) throws ScheduleManagerException {
+//		Set<Schedule> schedules = getSchedules(projectId, flowId);
+//		if(schedules != null) {
+//			for(Schedule sched : schedules) {
+//				removeSchedule(sched);
+//			}
+//		}
+//	}
+	public synchronized void removeSchedule(int projectId, String flowId) throws ScheduleManagerException {
+		Schedule sched = getSchedule(projectId, flowId);
+		if(sched != null) {
 			removeSchedule(sched);
 		}
 	}
@@ -161,22 +207,28 @@ public class ScheduleManager {
 	 */
 	public synchronized void removeSchedule(Schedule sched) {
 		Pair<Integer,String> identityPairMap = sched.getScheduleIdentityPair();
-		Set<Schedule> schedules = scheduleIdentityPairMap.get(identityPairMap);
-		if(schedules != null) {
-			schedules.remove(sched);
-			if(schedules.size() == 0) {
-				scheduleIdentityPairMap.remove(identityPairMap);
-			}
+//		Set<Schedule> schedules = scheduleIdentityPairMap.get(identityPairMap);
+//		if(schedules != null) {
+//			schedules.remove(sched);
+//			if(schedules.size() == 0) {
+//				scheduleIdentityPairMap.remove(identityPairMap);
+//			}
+//		}
+		Schedule schedule = scheduleIdentityPairMap.get(identityPairMap);
+		if(schedule != null) {
+			scheduleIdentityPairMap.remove(identityPairMap);
 		}
+
 		scheduleIDMap.remove(sched.getScheduleId());
 		
-		runner.removeRunnerSchedule(sched);
 		try {
 			loader.removeSchedule(sched);
 		} catch (ScheduleManagerException e) {
 			// TODO Auto-generated catch block
 			e.printStackTrace();
 		}
+
+		
 	}
 
 	// public synchronized void pauseScheduledFlow(String scheduleId){
@@ -232,7 +284,7 @@ public class ScheduleManager {
 			final long submitTime,
 			final String submitUser,
 			ExecutionOptions execOptions,
-			SlaOptions slaOptions
+			List<SlaOption> slaOptions
 			) {
 		Schedule sched = new Schedule(scheduleId, projectId, projectName, flowName, status, firstSchedTime, timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser, execOptions, slaOptions);
 		logger.info("Scheduling flow '" + sched.getScheduleName() + "' for "
@@ -249,19 +301,20 @@ public class ScheduleManager {
 	 * @param flow
 	 */
 	private synchronized void internalSchedule(Schedule s) {
-		Schedule existing = scheduleIDMap.get(s.getScheduleId());
-		if (existing != null) {
-			this.runner.removeRunnerSchedule(existing);
-		}
-		s.updateTime();
-		this.runner.addRunnerSchedule(s);
+		//Schedule existing = scheduleIDMap.get(s.getScheduleId());
+//		Schedule existing = null;
+//		if(scheduleIdentityPairMap.get(s.getScheduleIdentityPair()) != null) {
+//			existing = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
+//		}
+
 		scheduleIDMap.put(s.getScheduleId(), s);
-		Set<Schedule> schedules = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
-		if(schedules == null) {
-			schedules = new HashSet<Schedule>();
-			scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), schedules);
-		}
-		schedules.add(s);
+//		Set<Schedule> schedules = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
+//		if(schedules == null) {
+//			schedules = new HashSet<Schedule>();
+//			scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), schedules);
+//		}
+//		schedules.add(s);
+		scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
 	}
 
 	/**
@@ -270,14 +323,16 @@ public class ScheduleManager {
 	 * @param flow
 	 */
 	public synchronized void insertSchedule(Schedule s) {
-		boolean exist = s.getScheduleId() != -1;
+		//boolean exist = s.getScheduleId() != -1;
+		Schedule exist = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
 		if(s.updateTime()) {
 			try {
-				if(!exist) {
+				if(exist == null) {
 					loader.insertSchedule(s);
 					internalSchedule(s);
 				}
 				else{
+					s.setScheduleId(exist.getScheduleId());
 					loader.updateSchedule(s);
 					internalSchedule(s);
 				}
@@ -290,14 +345,13 @@ public class ScheduleManager {
 			logger.error("The provided schedule is non-recurring and the scheduled time already passed. " + s.getScheduleName());
 		}
 	}
-
-//	/**
-//	 * Save the schedule
-//	 */
-//	private void saveSchedule() {
-//		loader.saveSchedule(getSchedule());
-//	}
 	
+
+	@Override
+	public void loadTriggerFromProps(Props props) throws ScheduleManagerException {
+		throw new ScheduleManagerException("Creating " + getTriggerSource() + " from props is not supported yet");
+	}	
+
 	/**
 	 * Thread that simply invokes the running of flows when the schedule is
 	 * ready.
@@ -305,257 +359,260 @@ public class ScheduleManager {
 	 * @author Richard Park
 	 * 
 	 */
-	public class ScheduleRunner extends Thread {
-		private final PriorityBlockingQueue<Schedule> schedules;
-		private AtomicBoolean stillAlive = new AtomicBoolean(true);
-
-		// Five minute minimum intervals
-		private static final int TIMEOUT_MS = 300000;
-
-		public ScheduleRunner() {
-			schedules = new PriorityBlockingQueue<Schedule>(1,new ScheduleComparator());
-		}
-
-		public void shutdown() {
-			logger.error("Shutting down scheduler thread");
-			stillAlive.set(false);
-			this.interrupt();
-		}
-
-		/**
-		 * Return a list of scheduled flow
-		 * 
-		 * @return
-		 */
-		public synchronized List<Schedule> getRunnerSchedules() {
-			return new ArrayList<Schedule>(schedules);
-		}
-
-		/**
-		 * Adds the flow to the schedule and then interrupts so it will update
-		 * its wait time.
-		 * 
-		 * @param flow
-		 */
-		public synchronized void addRunnerSchedule(Schedule s) {
-			logger.info("Adding " + s + " to schedule runner.");
-			schedules.add(s);
-			// MonitorImpl.getInternalMonitorInterface().workflowEvent(null,
-			// System.currentTimeMillis(),
-			// WorkflowAction.SCHEDULE_WORKFLOW,
-			// WorkflowState.NOP,
-			// flow.getId());
-
-			this.interrupt();
-		}
-
-		/**
-		 * Remove scheduled flows. Does not interrupt.
-		 * 
-		 * @param flow
-		 */
-		public synchronized void removeRunnerSchedule(Schedule s) {
-			logger.info("Removing " + s + " from the schedule runner.");
-			schedules.remove(s);
-			// MonitorImpl.getInternalMonitorInterface().workflowEvent(null,
-			// System.currentTimeMillis(),
-			// WorkflowAction.UNSCHEDULE_WORKFLOW,
-			// WorkflowState.NOP,
-			// flow.getId());
-			// Don't need to interrupt, because if this is originally on the top
-			// of the queue,
-			// it'll just skip it.
-		}
-
-		public void run() {
-			while (stillAlive.get()) {
-				synchronized (this) {
-					try {
-						lastCheckTime = System.currentTimeMillis();
-						
-						runnerStage = "Starting schedule scan.";
-						// TODO clear up the exception handling
-						Schedule s = schedules.peek();
-
-						if (s == null) {
-							// If null, wake up every minute or so to see if
-							// there's something to do. Most likely there will not be.
-							try {
-								logger.info("Nothing scheduled to run. Checking again soon.");
-								runnerStage = "Waiting for next round scan.";
-								nextWakupTime = System.currentTimeMillis() + TIMEOUT_MS;
-								this.wait(TIMEOUT_MS);
-							} catch (InterruptedException e) {
-								// interruption should occur when items are added or removed from the queue.
-							}
-						} else {
-							// We've passed the flow execution time, so we will run.
-							if (!(new DateTime(s.getNextExecTime())).isAfterNow()) {
-								// Run flow. The invocation of flows should be quick.
-								Schedule runningSched = schedules.poll();
-
-								runnerStage = "Ready to run schedule " + runningSched.toString();
-								
-								logger.info("Scheduler ready to run " + runningSched.toString());
-								// Execute the flow here
-								try {
-									Project project = projectManager.getProject(runningSched.getProjectId());
-									if (project == null) {
-										logger.error("Scheduled Project " + runningSched.getProjectId() + " does not exist!");
-										throw new RuntimeException("Error finding the scheduled project. "+ runningSched.getProjectId());
-									}	
-									//TODO It is possible that the project is there, but the flow doesn't exist because upload a version that changes flow structure
-
-									Flow flow = project.getFlow(runningSched.getFlowName());
-									if (flow == null) {
-										logger.error("Flow " + runningSched.getScheduleName() + " cannot be found in project " + project.getName());
-										throw new RuntimeException("Error finding the scheduled flow. " + runningSched.getScheduleName());
-									}
-									
-									// Create ExecutableFlow
-									ExecutableFlow exflow = new ExecutableFlow(flow);
-									System.out.println("ScheduleManager: creating schedule: " +runningSched.getScheduleId());
-									exflow.setScheduleId(runningSched.getScheduleId());
-									exflow.setSubmitUser(runningSched.getSubmitUser());
-									exflow.addAllProxyUsers(project.getProxyUsers());
-									
-									ExecutionOptions flowOptions = runningSched.getExecutionOptions();
-									if(flowOptions == null) {
-										flowOptions = new ExecutionOptions();
-										flowOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
-									}
-									exflow.setExecutionOptions(flowOptions);
-									
-									if (!flowOptions.isFailureEmailsOverridden()) {
-										flowOptions.setFailureEmails(flow.getFailureEmails());
-									}
-									if (!flowOptions.isSuccessEmailsOverridden()) {
-										flowOptions.setSuccessEmails(flow.getSuccessEmails());
-									}
-									
-									runnerStage = "Submitting flow " + exflow.getFlowId();
-									flowOptions.setMailCreator(flow.getMailCreator());
-									
-									try {
-										executorManager.submitExecutableFlow(exflow);
-										logger.info("Scheduler has invoked " + exflow.getExecutionId());
-									} 
-									catch (ExecutorManagerException e) {
-										throw e;
-									}
-									catch (Exception e) {	
-										e.printStackTrace();
-										throw new ScheduleManagerException("Scheduler invoked flow " + exflow.getExecutionId() + " has failed.", e);
-									}
-									
-									SlaOptions slaOptions = runningSched.getSlaOptions();
-									if(slaOptions != null) {
-										logger.info("Submitting SLA checkings for " + runningSched.getFlowName());
-										runnerStage = "Submitting SLA checkings for " + runningSched.getFlowName();
-										// submit flow slas
-										List<SlaSetting> jobsettings = new ArrayList<SlaSetting>();
-										for(SlaSetting set : slaOptions.getSettings()) {
-											if(set.getId().equals("")) {
-												DateTime checkTime = new DateTime(runningSched.getNextExecTime()).plus(set.getDuration());
-												slaManager.submitSla(exflow.getExecutionId(), "", checkTime, slaOptions.getSlaEmails(), set.getActions(), null, set.getRule());
-											}
-											else {
-												jobsettings.add(set);
-											}
-										}
-										if(jobsettings.size() > 0) {
-											slaManager.submitSla(exflow.getExecutionId(), "", DateTime.now(), slaOptions.getSlaEmails(), new ArrayList<SlaAction>(), jobsettings, SlaRule.WAITANDCHECKJOB);
-										}
-									}
-									
-								} 
-								catch (ExecutorManagerException e) {
-									if (e.getReason() != null && e.getReason() == ExecutorManagerException.Reason.SkippedExecution) {
-										logger.info(e.getMessage());
-									}
-									else {
-										e.printStackTrace();
-									}
-								}
-								catch (Exception e) {
-									logger.info("Scheduler failed to run job. " + e.getMessage() + e.getCause());
-								}
-
-								runnerStage = "Done running schedule for " + runningSched.toString();
-								removeRunnerSchedule(runningSched);
-
-								// Immediately reschedule if it's possible. Let
-								// the execution manager
-								// handle any duplicate runs.
-								if (runningSched.updateTime()) {
-									addRunnerSchedule(runningSched);
-									loader.updateSchedule(runningSched);
-								}
-								else {
-									removeSchedule(runningSched);
-								}								
-							} else {
-								runnerStage = "Waiting for next round scan.";
-								// wait until flow run
-								long millisWait = Math.max(0, s.getNextExecTime() - (new DateTime()).getMillis());
-								try {
-									nextWakupTime = System.currentTimeMillis() + millisWait;
-									this.wait(Math.min(millisWait, TIMEOUT_MS));
-								} catch (InterruptedException e) {
-									// interruption should occur when items are
-									// added or removed from the queue.
-								}
-							}
-						}
-					} catch (Exception e) {
-						logger.error("Unexpected exception has been thrown in scheduler", e);
-					} catch (Throwable e) {
-						logger.error("Unexpected throwable has been thrown in scheduler", e);
-					}
-				}
-			}
-		}
-
-		/**
-		 * Class to sort the schedule based on time.
-		 * 
-		 * @author Richard Park
-		 */
-		private class ScheduleComparator implements Comparator<Schedule> {
-			@Override
-			public int compare(Schedule arg0, Schedule arg1) {
-				long first = arg1.getNextExecTime();
-				long second = arg0.getNextExecTime();
-
-				if (first == second) {
-					return 0;
-				} else if (first < second) {
-					return 1;
-				}
-
-				return -1;
-			}
-		}
-	}
-	
-	public long getLastCheckTime() {
-		return lastCheckTime;
-	}
-	
-	public long getNextUpdateTime() {
-		return nextWakupTime;
-	}
-	
-	public State getThreadState() {
-		return runner.getState();
-	}
+//	public class ScheduleRunner extends Thread {
+//		private final PriorityBlockingQueue<Schedule> schedules;
+//		private AtomicBoolean stillAlive = new AtomicBoolean(true);
+//
+//		// Five minute minimum intervals
+//		private static final int TIMEOUT_MS = 300000;
+//
+//		public ScheduleRunner() {
+//			schedules = new PriorityBlockingQueue<Schedule>(1,new ScheduleComparator());
+//		}
+//
+//		public void shutdown() {
+//			logger.error("Shutting down scheduler thread");
+//			stillAlive.set(false);
+//			this.interrupt();
+//		}
+//
+//		/**
+//		 * Return a list of scheduled flow
+//		 * 
+//		 * @return
+//		 */
+//		public synchronized List<Schedule> getRunnerSchedules() {
+//			return new ArrayList<Schedule>(schedules);
+//		}
+//
+//		/**
+//		 * Adds the flow to the schedule and then interrupts so it will update
+//		 * its wait time.
+//		 * 
+//		 * @param flow
+//		 */
+//		public synchronized void addRunnerSchedule(Schedule s) {
+//			logger.info("Adding " + s + " to schedule runner.");
+//			schedules.add(s);
+//			// MonitorImpl.getInternalMonitorInterface().workflowEvent(null,
+//			// System.currentTimeMillis(),
+//			// WorkflowAction.SCHEDULE_WORKFLOW,
+//			// WorkflowState.NOP,
+//			// flow.getId());
+//
+//			this.interrupt();
+//		}
+//
+//		/**
+//		 * Remove scheduled flows. Does not interrupt.
+//		 * 
+//		 * @param flow
+//		 */
+//		public synchronized void removeRunnerSchedule(Schedule s) {
+//			logger.info("Removing " + s + " from the schedule runner.");
+//			schedules.remove(s);
+//			// MonitorImpl.getInternalMonitorInterface().workflowEvent(null,
+//			// System.currentTimeMillis(),
+//			// WorkflowAction.UNSCHEDULE_WORKFLOW,
+//			// WorkflowState.NOP,
+//			// flow.getId());
+//			// Don't need to interrupt, because if this is originally on the top
+//			// of the queue,
+//			// it'll just skip it.
+//		}
+//
+//		public void run() {
+//			while (stillAlive.get()) {
+//				synchronized (this) {
+//					try {
+//						lastCheckTime = System.currentTimeMillis();
+//						
+//						runnerStage = "Starting schedule scan.";
+//						// TODO clear up the exception handling
+//						Schedule s = schedules.peek();
+//
+//						if (s == null) {
+//							// If null, wake up every minute or so to see if
+//							// there's something to do. Most likely there will not be.
+//							try {
+//								logger.info("Nothing scheduled to run. Checking again soon.");
+//								runnerStage = "Waiting for next round scan.";
+//								nextWakupTime = System.currentTimeMillis() + TIMEOUT_MS;
+//								this.wait(TIMEOUT_MS);
+//							} catch (InterruptedException e) {
+//								// interruption should occur when items are added or removed from the queue.
+//							}
+//						} else {
+//							// We've passed the flow execution time, so we will run.
+//							if (!(new DateTime(s.getNextExecTime())).isAfterNow()) {
+//								// Run flow. The invocation of flows should be quick.
+//								Schedule runningSched = schedules.poll();
+//
+//								runnerStage = "Ready to run schedule " + runningSched.toString();
+//								
+//								logger.info("Scheduler ready to run " + runningSched.toString());
+//								// Execute the flow here
+//								try {
+//									Project project = projectManager.getProject(runningSched.getProjectId());
+//									if (project == null) {
+//										logger.error("Scheduled Project " + runningSched.getProjectId() + " does not exist!");
+//										throw new RuntimeException("Error finding the scheduled project. "+ runningSched.getProjectId());
+//									}	
+//									//TODO It is possible that the project is there, but the flow doesn't exist because upload a version that changes flow structure
+//
+//									Flow flow = project.getFlow(runningSched.getFlowName());
+//									if (flow == null) {
+//										logger.error("Flow " + runningSched.getScheduleName() + " cannot be found in project " + project.getName());
+//										throw new RuntimeException("Error finding the scheduled flow. " + runningSched.getScheduleName());
+//									}
+//									
+//									// Create ExecutableFlow
+//									ExecutableFlow exflow = new ExecutableFlow(flow);
+//									System.out.println("ScheduleManager: creating schedule: " +runningSched.getScheduleId());
+//									exflow.setScheduleId(runningSched.getScheduleId());
+//									exflow.setSubmitUser(runningSched.getSubmitUser());
+//									exflow.addAllProxyUsers(project.getProxyUsers());
+//									
+//									ExecutionOptions flowOptions = runningSched.getExecutionOptions();
+//									if(flowOptions == null) {
+//										flowOptions = new ExecutionOptions();
+//										flowOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
+//									}
+//									exflow.setExecutionOptions(flowOptions);
+//									
+//									if (!flowOptions.isFailureEmailsOverridden()) {
+//										flowOptions.setFailureEmails(flow.getFailureEmails());
+//									}
+//									if (!flowOptions.isSuccessEmailsOverridden()) {
+//										flowOptions.setSuccessEmails(flow.getSuccessEmails());
+//									}
+//									
+//									runnerStage = "Submitting flow " + exflow.getFlowId();
+//									flowOptions.setMailCreator(flow.getMailCreator());
+//									
+//									try {
+//										executorManager.submitExecutableFlow(exflow);
+//										logger.info("Scheduler has invoked " + exflow.getExecutionId());
+//									} 
+//									catch (ExecutorManagerException e) {
+//										throw e;
+//									}
+//									catch (Exception e) {	
+//										e.printStackTrace();
+//										throw new ScheduleManagerException("Scheduler invoked flow " + exflow.getExecutionId() + " has failed.", e);
+//									}
+//									
+//									SlaOptions slaOptions = runningSched.getSlaOptions();
+//									if(slaOptions != null) {
+//										logger.info("Submitting SLA checkings for " + runningSched.getFlowName());
+//										runnerStage = "Submitting SLA checkings for " + runningSched.getFlowName();
+//										// submit flow slas
+//										List<SlaSetting> jobsettings = new ArrayList<SlaSetting>();
+//										for(SlaSetting set : slaOptions.getSettings()) {
+//											if(set.getId().equals("")) {
+//												DateTime checkTime = new DateTime(runningSched.getNextExecTime()).plus(set.getDuration());
+//												slaManager.submitSla(exflow.getExecutionId(), "", checkTime, slaOptions.getSlaEmails(), set.getActions(), null, set.getRule());
+//											}
+//											else {
+//												jobsettings.add(set);
+//											}
+//										}
+//										if(jobsettings.size() > 0) {
+//											slaManager.submitSla(exflow.getExecutionId(), "", DateTime.now(), slaOptions.getSlaEmails(), new ArrayList<SlaAction>(), jobsettings, SlaRule.WAITANDCHECKJOB);
+//										}
+//									}
+//									
+//								} 
+//								catch (ExecutorManagerException e) {
+//									if (e.getReason() != null && e.getReason() == ExecutorManagerException.Reason.SkippedExecution) {
+//										logger.info(e.getMessage());
+//									}
+//									else {
+//										e.printStackTrace();
+//									}
+//								}
+//								catch (Exception e) {
+//									logger.info("Scheduler failed to run job. " + e.getMessage() + e.getCause());
+//								}
+//
+//								runnerStage = "Done running schedule for " + runningSched.toString();
+//								removeRunnerSchedule(runningSched);
+//
+//								// Immediately reschedule if it's possible. Let
+//								// the execution manager
+//								// handle any duplicate runs.
+//								if (runningSched.updateTime()) {
+//									addRunnerSchedule(runningSched);
+//									loader.updateSchedule(runningSched);
+//								}
+//								else {
+//									removeSchedule(runningSched);
+//								}								
+//							} else {
+//								runnerStage = "Waiting for next round scan.";
+//								// wait until flow run
+//								long millisWait = Math.max(0, s.getNextExecTime() - (new DateTime()).getMillis());
+//								try {
+//									nextWakupTime = System.currentTimeMillis() + millisWait;
+//									this.wait(Math.min(millisWait, TIMEOUT_MS));
+//								} catch (InterruptedException e) {
+//									// interruption should occur when items are
+//									// added or removed from the queue.
+//								}
+//							}
+//						}
+//					} catch (Exception e) {
+//						logger.error("Unexpected exception has been thrown in scheduler", e);
+//					} catch (Throwable e) {
+//						logger.error("Unexpected throwable has been thrown in scheduler", e);
+//					}
+//				}
+//			}
+//		}
+//
+//		/**
+//		 * Class to sort the schedule based on time.
+//		 * 
+//		 * @author Richard Park
+//		 */
+//		private class ScheduleComparator implements Comparator<Schedule> {
+//			@Override
+//			public int compare(Schedule arg0, Schedule arg1) {
+//				long first = arg1.getNextExecTime();
+//				long second = arg0.getNextExecTime();
+//
+//				if (first == second) {
+//					return 0;
+//				} else if (first < second) {
+//					return 1;
+//				}
+//
+//				return -1;
+//			}
+//		}
+//	}
 	
-	public boolean isThreadActive() {
-		return runner.isAlive();
-	}
+//	public long getLastCheckTime() {
+//		return lastCheckTime;
+//	}
+//	
+//	public long getNextUpdateTime() {
+//		return nextWakupTime;
+//	}
+//	
+//	public State getThreadState() {
+//		return runner.getState();
+//	}
+//	
+//	public boolean isThreadActive() {
+//		return runner.isAlive();
+//	}
 
-	public String getThreadStage() {
-		return runnerStage;
+	@Override
+	public String getTriggerSource() {
+		return triggerSource;
 	}
 	
+
 }
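
ScheduleManager is now a thin TriggerAgent over a ScheduleLoader rather than the owner of a runner thread. A minimal wiring sketch follows, under the assumption that the web server constructs the TriggerManager elsewhere (that setup is outside this file):

import azkaban.scheduler.ScheduleLoader;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.scheduler.TriggerBasedScheduleLoader;
import azkaban.trigger.TriggerManager;

public class ScheduleManagerWiringSketch {
	// triggerManager is assumed to be built by the web server; its construction is not shown here.
	public static ScheduleManager wire(TriggerManager triggerManager) throws ScheduleManagerException {
		ScheduleLoader loader =
				new TriggerBasedScheduleLoader(triggerManager, ScheduleManager.triggerSource);

		// No runner thread any more: the manager only mirrors trigger state locally.
		ScheduleManager scheduleManager = new ScheduleManager(loader);
		scheduleManager.start(); // loads existing schedules and drops the ones already expired

		return scheduleManager;
	}
}

Reads such as getSchedules() and getSchedule() call updateLocal() first, so the local maps pick up trigger-side changes lazily instead of relying on the old scheduler thread.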
diff --git a/src/java/azkaban/scheduler/ScheduleManagerException.java b/src/java/azkaban/scheduler/ScheduleManagerException.java
index 41e1da8..3ffb1b6 100644
--- a/src/java/azkaban/scheduler/ScheduleManagerException.java
+++ b/src/java/azkaban/scheduler/ScheduleManagerException.java
@@ -26,4 +26,8 @@ public class ScheduleManagerException extends Exception{
 	public ScheduleManagerException(String message, Throwable cause) {
 		super(message, cause);
 	}
+
+	public ScheduleManagerException(Exception e) {
+		super(e);
+	}
 }
diff --git a/src/java/azkaban/scheduler/ScheduleStatisticManager.java b/src/java/azkaban/scheduler/ScheduleStatisticManager.java
index beabbbf..d5de1e1 100644
--- a/src/java/azkaban/scheduler/ScheduleStatisticManager.java
+++ b/src/java/azkaban/scheduler/ScheduleStatisticManager.java
@@ -24,7 +24,7 @@ import java.util.List;
 import java.util.Map;
 
 import azkaban.executor.ExecutableFlow;
-import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.executor.Status;
 import azkaban.utils.JSONUtils;
@@ -35,7 +35,7 @@ public class ScheduleStatisticManager {
 	private static File cacheDirectory;
 	private static final int STAT_NUMBERS = 10;
 
-	public static Map<String, Object> getStatistics(int scheduleId, AzkabanWebServer server) {
+	public static Map<String, Object> getStatistics(int scheduleId, AzkabanWebServer server) throws ScheduleManagerException {
 		if (cacheDirectory == null) {
 			setCacheFolder(new File(server.getServerProps().getString("cache.directory", "cache")));
 		}
@@ -52,9 +52,9 @@ public class ScheduleStatisticManager {
 		return data;
 	}
 
-	private static Map<String, Object> calculateStats(int scheduleId, AzkabanWebServer server) {
+	private static Map<String, Object> calculateStats(int scheduleId, AzkabanWebServer server) throws ScheduleManagerException {
 		Map<String, Object> data = new HashMap<String, Object>();
-		ExecutorManager executorManager = server.getExecutorManager();
+		ExecutorManagerAdapter executorManager = server.getExecutorManager();
 		ScheduleManager scheduleManager = server.getScheduleManager();
 		Schedule schedule = scheduleManager.getSchedule(scheduleId);
 
diff --git a/src/java/azkaban/scheduler/TriggerBasedScheduleLoader.java b/src/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
new file mode 100644
index 0000000..66bc178
--- /dev/null
+++ b/src/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
@@ -0,0 +1,206 @@
+package azkaban.scheduler;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.TriggerManager;
+import azkaban.trigger.TriggerManagerAdapter;
+import azkaban.trigger.TriggerManagerException;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+
+public class TriggerBasedScheduleLoader implements ScheduleLoader {
+	
+	private static Logger logger = Logger.getLogger(TriggerBasedScheduleLoader.class);
+	
+	private TriggerManagerAdapter triggerManager;
+	
+	private String triggerSource;
+	
+	private long lastUpdateTime = -1;
+	
+	public TriggerBasedScheduleLoader(TriggerManager triggerManager, String triggerSource) {
+		this.triggerManager = triggerManager;
+		this.triggerSource = triggerSource;
+//		// need to init the action types and condition checker types 
+//		ExecuteFlowAction.setExecutorManager(executorManager);
+//		ExecuteFlowAction.setProjectManager(projectManager);
+	}
+	
+	private Trigger scheduleToTrigger(Schedule s) {
+		Condition triggerCondition = createTriggerCondition(s);
+		Condition expireCondition = createExpireCondition(s);
+		List<TriggerAction> actions = createActions(s);
+		Trigger t = new Trigger(s.getScheduleId(), s.getLastModifyTime(), s.getSubmitTime(), s.getSubmitUser(), triggerSource, triggerCondition, expireCondition, actions);
+		if(s.isRecurring()) {
+			t.setResetOnTrigger(true);
+		} else {
+			t.setResetOnTrigger(false);
+		}
+		return t;
+	}
+	
+	private List<TriggerAction> createActions (Schedule s) {
+		List<TriggerAction> actions = new ArrayList<TriggerAction>();
+		ExecuteFlowAction executeAct = new ExecuteFlowAction("executeFlowAction", s.getProjectId(), s.getProjectName(), s.getFlowName(), s.getSubmitUser(), s.getExecutionOptions(), s.getSlaOptions());
+		actions.add(executeAct);
+//		List<SlaOption> slaOptions = s.getSlaOptions();
+//		if(slaOptions != null && slaOptions.size() > 0) {
+//			// insert a trigger to keep watching that execution
+//			for(SlaOption sla : slaOptions) {
+//				// need to create triggers for each sla
+//				SlaChecker slaChecker = new SlaChecker("slaChecker", sla, executeAct.getId());
+//				
+//			}
+//		}
+		
+		return actions;
+	}
+	
+	private Condition createTriggerCondition (Schedule s) {
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+		ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
+		checkers.put(checker.getId(), checker);
+		String expr = checker.getId() + ".eval()";
+		Condition cond = new Condition(checkers, expr);
+		return cond;
+	}
+	
+	// if failed to trigger, auto expire?
+	private Condition createExpireCondition (Schedule s) {
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+		ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod());
+		checkers.put(checker.getId(), checker);
+		String expr = checker.getId() + ".eval()";
+		Condition cond = new Condition(checkers, expr);
+		return cond;
+	}
+
+	@Override
+	public void insertSchedule(Schedule s) throws ScheduleManagerException {
+		Trigger t = scheduleToTrigger(s);
+		try {
+			triggerManager.insertTrigger(t, t.getSubmitUser());
+			s.setScheduleId(t.getTriggerId());
+		} catch (TriggerManagerException e) {
+			throw new ScheduleManagerException("Failed to insert new schedule!", e);
+		}
+	}
+
+	@Override
+	public void updateSchedule(Schedule s) throws ScheduleManagerException {
+		Trigger t = scheduleToTrigger(s);
+		try {
+			triggerManager.updateTrigger(t, t.getSubmitUser());
+		} catch (TriggerManagerException e) {
+			throw new ScheduleManagerException("Failed to update schedule!", e);
+		}
+	}
+
+	//TODO
+	// may need to add logic to filter out skipped runs
+	@Override
+	public synchronized List<Schedule> loadSchedules() throws ScheduleManagerException {
+		List<Trigger> triggers = triggerManager.getTriggers(triggerSource);
+		List<Schedule> schedules = new ArrayList<Schedule>();
+//		triggersLocalCopy = new HashMap<Integer, Trigger>();
+		for(Trigger t : triggers) {
+			lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
+			Schedule s = triggerToSchedule(t);
+			schedules.add(s);
+			logger.info("Loaded schedule for project " + s.getProjectId() + " " + s.getProjectName());
+		}
+		return schedules;
+		
+	}
+	
+	private Schedule triggerToSchedule(Trigger t) throws ScheduleManagerException {
+		Condition triggerCond = t.getTriggerCondition();
+		Map<String, ConditionChecker> checkers = triggerCond.getCheckers();
+		BasicTimeChecker ck = null;
+		for(ConditionChecker checker : checkers.values()) {
+			if(checker.getType().equals(BasicTimeChecker.type)) {
+				ck = (BasicTimeChecker) checker;
+				break;
+			}
+		}
+		List<TriggerAction> actions = t.getActions();
+		ExecuteFlowAction act = null;
+		for(TriggerAction action : actions) {
+			if(action.getType().equals(ExecuteFlowAction.type)) {
+				act = (ExecuteFlowAction) action;
+				break;
+			}
+		}
+		if(ck != null && act != null) {
+			Schedule s = new Schedule(
+					t.getTriggerId(), 
+					act.getProjectId(), 
+					act.getProjectName(), 
+					act.getFlowName(), 
+					t.getStatus().toString(), 
+					ck.getFirstCheckTime(), 
+					ck.getTimeZone(), 
+					ck.getPeriod(),
+					t.getLastModifyTime(),
+					ck.getNextCheckTime(),
+					t.getSubmitTime(),
+					t.getSubmitUser(),
+					act.getExecutionOptions(),
+					act.getSlaOptions());
+			return s;
+		} else {
+			logger.error("Failed to parse schedule from trigger!");
+			throw new ScheduleManagerException("Failed to parse schedule from trigger!");
+		}
+	}
+
+	@Override
+	public void removeSchedule(Schedule s) throws ScheduleManagerException {
+		try {
+			triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser());
+//			triggersLocalCopy.remove(s.getScheduleId());
+		} catch (TriggerManagerException e) {
+			// TODO Auto-generated catch block
+			throw new ScheduleManagerException(e.getMessage());
+		}
+		
+	}
+
+	@Override
+	public void updateNextExecTime(Schedule s)
+			throws ScheduleManagerException {
+//		Trigger t = triggersLocalCopy.get(s.getScheduleId());
+//		BasicTimeChecker ck = (BasicTimeChecker) t.getTriggerCondition().getCheckers().values().toArray()[0];
+//		s.setNextExecTime(ck.getNextCheckTime().getMillis());
+	}
+
+	@Override
+	public synchronized List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException {
+		List<Trigger> triggers;
+		try {
+			triggers = triggerManager.getTriggerUpdates(triggerSource, lastUpdateTime);
+		} catch (TriggerManagerException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+			throw new ScheduleManagerException(e);
+		}
+		List<Schedule> schedules = new ArrayList<Schedule>();
+		for(Trigger t : triggers) {
+			lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
+			Schedule s = triggerToSchedule(t);
+			schedules.add(s);
+			logger.info("Loaded schedule for project " + s.getProjectId() + " " + s.getProjectName());
+		}
+		return schedules;
+	}
+
+}
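
The loader above turns each Schedule into a Trigger whose firing condition is a single BasicTimeChecker behind a small expression. A sketch of the same construction as createTriggerCondition(), here for an hourly recurrence; the checker id and period are illustrative:

import java.util.HashMap;
import java.util.Map;

import org.joda.time.DateTimeZone;
import org.joda.time.Period;

import azkaban.trigger.Condition;
import azkaban.trigger.ConditionChecker;
import azkaban.trigger.builtin.BasicTimeChecker;

public class TriggerConditionSketch {
	public static Condition hourlyCondition() {
		// A recurring hourly time checker anchored at the current moment.
		ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_1",
				System.currentTimeMillis(), DateTimeZone.getDefault(),
				true /* recurring */, true /* skip past occurrences */, Period.hours(1));

		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
		checkers.put(checker.getId(), checker);

		// The condition expression simply asks that checker whether its time has arrived.
		return new Condition(checkers, checker.getId() + ".eval()");
	}
}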
diff --git a/src/java/azkaban/sla/SlaOption.java b/src/java/azkaban/sla/SlaOption.java
new file mode 100644
index 0000000..e8f7a04
--- /dev/null
+++ b/src/java/azkaban/sla/SlaOption.java
@@ -0,0 +1,164 @@
+package azkaban.sla;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+
+import azkaban.executor.ExecutableFlow;
+
+public class SlaOption {
+	
+	public static final String TYPE_FLOW_FINISH = "FlowFinish";
+	public static final String TYPE_FLOW_SUCCEED = "FlowSucceed";
+	public static final String TYPE_FLOW_PROGRESS = "FlowProgress";
+	
+	public static final String TYPE_JOB_FINISH = "JobFinish";
+	public static final String TYPE_JOB_SUCCEED = "JobSucceed";
+	public static final String TYPE_JOB_PROGRESS = "JobProgress";
+	
+	public static final String INFO_DURATION = "Duration";
+	public static final String INFO_FLOW_NAME = "FlowName";
+	public static final String INFO_JOB_NAME = "JobName";
+	public static final String INFO_PROGRESS_PERCENT = "ProgressPercent";
+	public static final String INFO_EMAIL_LIST = "EmailList";
+	
+	// always alert
+	public static final String ALERT_TYPE = "SlaAlertType";
+	public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
+	public static final String ACTION_ALERT = "SlaAlert";
+	
+	private String type;
+	private Map<String, Object> info;
+	private List<String> actions;
+	
+	private static DateTimeFormatter fmt = DateTimeFormat.forPattern("MM/dd, YYYY HH:mm");
+	
+	public SlaOption(
+			String type,
+			List<String> actions,
+			Map<String, Object> info
+	) {
+		this.type = type;
+		this.info = info;
+		this.actions = actions;
+	}
+
+	public String getType() {
+		return type;
+	}
+
+	public void setType(String type) {
+		this.type = type;
+	}
+
+	public Map<String, Object> getInfo() {
+		return info;
+	}
+
+	public void setInfo(Map<String, Object> info) {
+		this.info = info;
+	}
+
+	public List<String> getActions() {
+		return actions;
+	}
+
+	public void setActions(List<String> actions) {
+		this.actions = actions;
+	}
+
+	public Map<String,Object> toObject() {
+		HashMap<String, Object> slaObj = new HashMap<String, Object>();
+
+		slaObj.put("type", type);
+		slaObj.put("info", info);
+		slaObj.put("actions", actions);
+
+		return slaObj;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static SlaOption fromObject(Object object) {
+
+		HashMap<String, Object> slaObj = (HashMap<String,Object>)object;
+
+		String type = (String) slaObj.get("type");
+		List<String> actions = (List<String>) slaObj.get("actions");
+		Map<String, Object> info = (Map<String, Object>) slaObj.get("info");
+
+		return new SlaOption(type, actions, info);
+	}
+
+	public Object toWebObject() {
+		HashMap<String, Object> slaObj = new HashMap<String, Object>();
+
+//		slaObj.put("type", type);
+//		slaObj.put("info", info);
+//		slaObj.put("actions", actions);
+		if(type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_FLOW_SUCCEED)) {
+			slaObj.put("id", "");
+		} else {
+			slaObj.put("id", info.get(INFO_JOB_NAME));
+		}
+		slaObj.put("duration", info.get(INFO_DURATION));
+		if(type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_JOB_FINISH)) {
+			slaObj.put("rule", "FINISH");
+		} else {
+			slaObj.put("rule", "SUCCESS");
+		} 
+		List<String> actionsObj = new ArrayList<String>();
+		for(String act : actions) {
+			if(act.equals(ACTION_ALERT)) {
+				actionsObj.add("EMAIL");
+			}
+			else {
+				actionsObj.add("KILL");
+			}
+		}
+		slaObj.put("actions", actionsObj);
+		
+		return slaObj;
+	}
+	
+	@Override
+	public String toString() {
+		return "SLA of type " + getType() + ", info: " + getInfo() + ", actions: " + getActions();
+	}
+	
+	public static String createSlaMessage(SlaOption slaOption, ExecutableFlow flow) {
+		String type = slaOption.getType();
+		int execId = flow.getExecutionId();
+		if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+			String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+			String basicinfo =  "SLA Alert: Your flow " + flowName + " failed to FINISH within " + duration + "</br>";
+			String expected = "Here are the details: </br>" + "Flow " + flowName + " in execution " + execId + " is expected to FINISH within " + duration + " from " + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+			String actual = "Actual flow status is " + flow.getStatus();
+			return basicinfo + expected + actual;
+		} else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+			String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+			String basicinfo =  "SLA Alert: Your flow " + flowName + " failed to SUCCEED within " + duration + "</br>";
+			String expected = "Here are the details: </br>" + "Flow " + flowName + " in execution " + execId + " is expected to SUCCEED within " + duration + " from " + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+			String actual = "Actual flow status is " + flow.getStatus();
+			return basicinfo + expected + actual;
+		} else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
+			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+			return "SLA Alert: Your job " + jobName + " failed to FINISH within " + duration + " in execution " + execId;
+		} else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+			return "SLA Alert: Your job " + jobName + " failed to SUCCEED within " + duration + " in execution " + execId;
+		} else {
+			return "Unrecognized SLA type " + type;
+		}
+	}
+
+
+}
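
For completeness, a rough sketch of the web-facing shape produced by toWebObject(); the job name and duration are made up, and the printed map ordering is not guaranteed.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import azkaban.sla.SlaOption;

public class SlaWebObjectSketch {
	public static void main(String[] args) {
		// Job-level SLA: kill the flow if this job has not succeeded within the duration.
		Map<String, Object> info = new HashMap<String, Object>();
		info.put(SlaOption.INFO_JOB_NAME, "load-dimensions"); // placeholder job name
		info.put(SlaOption.INFO_DURATION, "30m");             // placeholder duration
		SlaOption sla = new SlaOption(SlaOption.TYPE_JOB_SUCCEED,
				Arrays.asList(SlaOption.ACTION_CANCEL_FLOW), info);

		// Roughly: id=load-dimensions, duration=30m, rule=SUCCESS, actions=[KILL]
		System.out.println(sla.toWebObject());
	}
}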
diff --git a/src/java/azkaban/trigger/ActionTypeLoader.java b/src/java/azkaban/trigger/ActionTypeLoader.java
new file mode 100644
index 0000000..4ca9436
--- /dev/null
+++ b/src/java/azkaban/trigger/ActionTypeLoader.java
@@ -0,0 +1,163 @@
+package azkaban.trigger;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.log4j.Logger;
+
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
+
+public class ActionTypeLoader {
+	
+	private static Logger logger = Logger.getLogger(ActionTypeLoader.class);
+	
+	public static final String DEFAULT_TRIGGER_ACTION_PLUGIN_DIR = "plugins/triggeractions";
+	private static final String ACTIONTYPECONFFILE = "plugin.properties"; // need jars.to.include property, will be loaded with user property
+	private static final String COMMONCONFFILE = "common.properties";	// common properties for multiple plugins
+
+	protected static Map<String, Class<? extends TriggerAction>> actionToClass = new HashMap<String, Class<? extends TriggerAction>>();
+	
+	public void init(Props props) throws TriggerException {
+		// load built-in actions
+		
+//
+//		loadBuiltinActions();
+//		
+//		loadPluginActions(props);
+
+	}
+	
+	public synchronized void registerActionType(String type, Class<? extends TriggerAction> actionClass) {
+		logger.info("Registering action " + type);
+		if(!actionToClass.containsKey(type)) {
+			actionToClass.put(type, actionClass);
+		}
+	}
+	
+//	private void loadPluginActions(Props props) throws TriggerException {
+//		String checkerDir = props.getString("azkaban.trigger.action.plugin.dir", DEFAULT_TRIGGER_ACTION_PLUGIN_DIR);
+//		File pluginDir = new File(checkerDir);
+//		if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead()) {
+//			logger.info("No trigger action plugins to load.");
+//			return;
+//		}
+//		
+//		logger.info("Loading plugin trigger actions from " + pluginDir);
+//		ClassLoader parentCl = this.getClass().getClassLoader();
+//		
+//		Props globalActionConf = null;
+//		File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
+//		try {
+//			if(confFile != null) {
+//				globalActionConf = new Props(null, confFile);
+//			} else {
+//				globalActionConf = new Props();
+//			}
+//		} catch (IOException e) {
+//			throw new TriggerException("Failed to get global properties." + e);
+//		}
+//		
+//		for(File dir : pluginDir.listFiles()) {
+//			if(dir.isDirectory() && dir.canRead()) {
+//				try {
+//					loadPluginTypes(globalActionConf, pluginDir, parentCl);
+//				} catch (Exception e) {
+//					logger.info("Plugin actions failed to load. " + e.getCause());
+//					throw new TriggerException("Failed to load all trigger actions!", e);
+//				}
+//			}
+//		}
+//	}
+//	
+//	@SuppressWarnings("unchecked")
+//	private void loadPluginTypes(Props globalConf, File dir, ClassLoader parentCl) throws TriggerException {
+//		Props actionConf = null;
+//		File confFile = Utils.findFilefromDir(dir, ACTIONTYPECONFFILE);
+//		if(confFile == null) {
+//			logger.info("No action type found in " + dir.getAbsolutePath());
+//			return;
+//		}
+//		try {
+//			actionConf = new Props(globalConf, confFile);
+//		} catch (IOException e) {
+//			throw new TriggerException("Failed to load config for the action type", e);
+//		}
+//		
+//		String actionName = dir.getName();
+//		String actionClass = actionConf.getString("action.class");
+//		
+//		List<URL> resources = new ArrayList<URL>();		
+//		for(File f : dir.listFiles()) {
+//			try {
+//				if(f.getName().endsWith(".jar")) {
+//					resources.add(f.toURI().toURL());
+//					logger.info("adding to classpath " + f.toURI().toURL());
+//				}
+//			} catch (MalformedURLException e) {
+//				// TODO Auto-generated catch block
+//				throw new TriggerException(e);
+//			}
+//		}
+//		
+//		// each job type can have a different class loader
+//		ClassLoader actionCl = new URLClassLoader(resources.toArray(new URL[resources.size()]), parentCl);
+//		
+//		Class<? extends TriggerAction> clazz = null;
+//		try {
+//			clazz = (Class<? extends TriggerAction>)actionCl.loadClass(actionClass);
+//			actionToClass.put(actionName, clazz);
+//		}
+//		catch (ClassNotFoundException e) {
+//			throw new TriggerException(e);
+//		}
+//		
+//		if(actionConf.getBoolean("need.init")) {
+//			try {
+//				Utils.invokeStaticMethod(actionCl, actionClass, "init", actionConf);
+//			} catch (Exception e) {
+//				e.printStackTrace();
+//				logger.error("Failed to init the action type " + actionName);
+//				throw new TriggerException(e);
+//			}
+//		}
+//		
+//		logger.info("Loaded action type " + actionName + " " + actionClass);
+//	}
+//	
+//	private void loadBuiltinActions() {
+//		actionToClass.put(ExecuteFlowAction.type, ExecuteFlowAction.class);		
+//		logger.info("Loaded ExecuteFlowAction type.");
+//	}
+	
+	public static void registerBuiltinActions(Map<String, Class<? extends TriggerAction>> builtinActions) {
+		actionToClass.putAll(builtinActions);
+		for(String type : builtinActions.keySet()) {
+			logger.info("Loaded " + type + " action.");
+		}
+	}
+	
+	public TriggerAction createActionFromJson(String type, Object obj) throws Exception {
+		TriggerAction action = null;
+		Class<? extends TriggerAction> actionClass = actionToClass.get(type);		
+		if(actionClass == null) {
+			throw new Exception("Action Type " + type + " not supported!");
+		}
+		action = (TriggerAction) Utils.invokeStaticMethod(actionClass.getClassLoader(), actionClass.getName(), "createFromJson", obj);
+		
+		return action;
+	}
+	
+	public TriggerAction createAction(String type, Object ... args) {
+		TriggerAction action = null;
+		Class<? extends TriggerAction> actionClass = actionToClass.get(type);		
+		action = (TriggerAction) Utils.callConstructor(actionClass, args);
+		
+		return action;
+	}
+	
+	public Set<String> getSupportedActions() {
+		return actionToClass.keySet();
+	}
+}
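
A sketch of how the loader above is meant to be used: built-in action classes are registered once, and actions are later revived from their JSON form through the static createFromJson factory each registered class is expected to expose (the loader calls it reflectively).

import java.util.HashMap;
import java.util.Map;

import azkaban.trigger.ActionTypeLoader;
import azkaban.trigger.TriggerAction;
import azkaban.trigger.builtin.ExecuteFlowAction;

public class ActionTypeLoaderSketch {
	public static void registerDefaults() {
		// Built-in actions are registered up front; plugin discovery stays commented out above.
		Map<String, Class<? extends TriggerAction>> builtins =
				new HashMap<String, Class<? extends TriggerAction>>();
		builtins.put(ExecuteFlowAction.type, ExecuteFlowAction.class);
		ActionTypeLoader.registerBuiltinActions(builtins);
	}

	public static TriggerAction revive(ActionTypeLoader loader, Object json) throws Exception {
		// Looks up the registered class for the type and invokes its static createFromJson(Object).
		return loader.createActionFromJson(ExecuteFlowAction.type, json);
	}
}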
diff --git a/src/java/azkaban/trigger/builtin/BasicTimeChecker.java b/src/java/azkaban/trigger/builtin/BasicTimeChecker.java
new file mode 100644
index 0000000..8dbc1cc
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/BasicTimeChecker.java
@@ -0,0 +1,188 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.ReadablePeriod;
+
+import azkaban.trigger.ConditionChecker;
+import azkaban.utils.Utils;
+
+public class BasicTimeChecker implements ConditionChecker {
+
+	public static final String type = "BasicTimeChecker";
+	
+	private long firstCheckTime;
+	private long nextCheckTime;
+	private DateTimeZone timezone;
+	private boolean isRecurring = true;
+	private boolean skipPastChecks = true;
+	private ReadablePeriod period;
+	
+	private final String id; 
+	
+	public BasicTimeChecker(
+			String id,
+			long firstCheckTime,
+			DateTimeZone timezone,
+			boolean isRecurring, 
+			boolean skipPastChecks,
+			ReadablePeriod period) {
+		this.id = id;
+		this.firstCheckTime = firstCheckTime;
+		this.timezone = timezone;
+		this.isRecurring = isRecurring;
+		this.skipPastChecks = skipPastChecks;
+		this.period = period;
+		this.nextCheckTime = firstCheckTime;
+		this.nextCheckTime = calculateNextCheckTime();
+	}
+	
+	public long getFirstCheckTime() {
+		return firstCheckTime;
+	}
+	
+	public DateTimeZone getTimeZone() {
+		return timezone;
+	}
+
+	public boolean isRecurring() {
+		return isRecurring;
+	}
+
+	public boolean isSkipPastChecks() {
+		return skipPastChecks;
+	}
+
+	public ReadablePeriod getPeriod() {
+		return period;
+	}
+
+	public long getNextCheckTime() {
+		return nextCheckTime;
+	}
+	
+	public BasicTimeChecker(
+			String id,
+			long firstCheckTime,
+			DateTimeZone timezone,
+			long nextCheckTime,
+			boolean isRecurring, 
+			boolean skipPastChecks,
+			ReadablePeriod period) {
+		this.id = id;
+		this.firstCheckTime = firstCheckTime;
+		this.timezone = timezone;
+		this.nextCheckTime = nextCheckTime;
+		this.isRecurring = isRecurring;
+		this.skipPastChecks = skipPastChecks;
+		this.period = period;
+	}
+	
+	@Override
+	public Boolean eval() {
+		return nextCheckTime < System.currentTimeMillis();
+	}
+
+	@Override
+	public void reset() {
+		this.nextCheckTime = calculateNextCheckTime();
+	}
+	
+	@Override
+	public String getId() {
+		return id;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static BasicTimeChecker createFromJson(Object obj) throws Exception {
+		return createFromJson((HashMap<String, Object>)obj);
+	}
+	
+	public static BasicTimeChecker createFromJson(HashMap<String, Object> obj) throws Exception {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
+		}
+		Long firstCheckTime = Long.valueOf((String) jsonObj.get("firstCheckTime"));
+		String timezoneId = (String) jsonObj.get("timezone");
+		long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+		DateTimeZone timezone = DateTimeZone.forID(timezoneId);
+		boolean isRecurring = Boolean.valueOf((String)jsonObj.get("isRecurring"));
+		boolean skipPastChecks = Boolean.valueOf((String)jsonObj.get("skipPastChecks"));
+		ReadablePeriod period = Utils.parsePeriodString((String)jsonObj.get("period"));
+		String id = (String) jsonObj.get("id");
+
+		BasicTimeChecker checker = new BasicTimeChecker(id, firstCheckTime, timezone, nextCheckTime, isRecurring, skipPastChecks, period);
+		if(skipPastChecks) {
+			checker.updateNextCheckTime();
+		}
+		return checker;
+	}
+	
+	@Override
+	public BasicTimeChecker fromJson(Object obj) throws Exception{
+		return createFromJson(obj);
+	}
+	
+	private void updateNextCheckTime(){
+		nextCheckTime = calculateNextCheckTime();
+	}
+	
+	private long calculateNextCheckTime(){
+		DateTime date = new DateTime(nextCheckTime).withZone(timezone);
+		int count = 0;
+		while(!date.isAfterNow()) {
+			if(count > 100000) {
+				throw new IllegalStateException("100000 increments of period did not get to present time.");
+			}
+			if(period == null) {
+				break;
+			}else {
+				date = date.plus(period);
+			}
+			count += 1;
+			if(!skipPastChecks) {
+				continue;
+			}
+		}
+		return date.getMillis();
+	}
+	
+	@Override
+	public Object getNum() {
+		return null;
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("type", type);
+		jsonObj.put("firstCheckTime", String.valueOf(firstCheckTime));
+		jsonObj.put("timezone", timezone.getID());
+		jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
+		jsonObj.put("isRecurring", String.valueOf(isRecurring));
+		jsonObj.put("skipPastChecks", String.valueOf(skipPastChecks));
+		jsonObj.put("period", Utils.createPeriodString(period));
+		jsonObj.put("id", id);
+		
+		return jsonObj;
+	}
+
+	@Override
+	public void stopChecker() {
+		return;
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/CreateTriggerAction.java b/src/java/azkaban/trigger/builtin/CreateTriggerAction.java
new file mode 100644
index 0000000..b4c5f84
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/CreateTriggerAction.java
@@ -0,0 +1,79 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.TriggerManager;
+
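+// TriggerAction that, when fired, inserts another Trigger into the TriggerManager.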
+public class CreateTriggerAction implements TriggerAction {
+	
+	public static final String type = "CreateTriggerAction";
+	private static TriggerManager triggerManager;
+	private Trigger trigger;
+	private Map<String, Object> context;
+	private String actionId;
+	
+	public CreateTriggerAction(String actionId, Trigger trigger) {
+		this.actionId = actionId;
+		this.trigger = trigger;
+	}
+	
+	@Override
+	public String getType() {
+		return type;
+	}
+	
+	public static void setTriggerManager(TriggerManager trm) {
+		triggerManager = trm;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static CreateTriggerAction createFromJson(Object obj) throws Exception {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
+		}
+		String actionId = (String) jsonObj.get("actionId");
+		Trigger trigger = Trigger.fromJson(jsonObj.get("trigger"));
+		return new CreateTriggerAction(actionId, trigger);
+	}
+	
+	@Override
+	public CreateTriggerAction fromJson(Object obj) throws Exception {
+		// TODO Auto-generated method stub
+		return createFromJson(obj);
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("actionId", actionId);
+		jsonObj.put("type", type);
+		jsonObj.put("trigger", trigger.toJson());
+
+		return jsonObj;
+	}
+
+	@Override
+	public void doAction() throws Exception {
+		triggerManager.insertTrigger(trigger);
+	}
+
+	@Override
+	public String getDescription() {
+		return "create another: " + trigger.getDescription();
+	}
+
+	@Override
+	public String getId() {
+		return actionId;
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+		this.context = context;
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/ExecuteFlowAction.java b/src/java/azkaban/trigger/builtin/ExecuteFlowAction.java
new file mode 100644
index 0000000..d17aa9d
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/ExecuteFlowAction.java
@@ -0,0 +1,276 @@
+package azkaban.trigger.builtin;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutionOptions;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.executor.ExecutorManagerException;
+import azkaban.executor.Status;
+import azkaban.flow.Flow;
+import azkaban.project.Project;
+import azkaban.project.ProjectManager;
+import azkaban.sla.SlaOption;
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.TriggerManager;
+
+public class ExecuteFlowAction implements TriggerAction {
+
+	public static final String type = "ExecuteFlowAction";
+
+	public static final String EXEC_ID = "ExecuteFlowAction.execid";
+	
+	private static ExecutorManagerAdapter executorManager;
+	private static TriggerManager triggerManager;
+	private String actionId;
+	private int projectId;
+	private String projectName;
+	private String flowName;
+	private String submitUser;
+	private static ProjectManager projectManager;
+	private ExecutionOptions executionOptions = new ExecutionOptions();
+	private List<SlaOption> slaOptions;
+	
+	private static Logger logger = Logger.getLogger(ExecuteFlowAction.class);
+	
+	public ExecuteFlowAction(String actionId, int projectId, String projectName, String flowName, String submitUser, ExecutionOptions executionOptions, List<SlaOption> slaOptions) {
+		this.actionId = actionId;
+		this.projectId = projectId;
+		this.projectName = projectName;
+		this.flowName = flowName;
+		this.submitUser = submitUser;
+		this.executionOptions = executionOptions;
+		this.slaOptions = slaOptions;
+	}
+	
+	public static void setLogger(Logger logger) {
+		ExecuteFlowAction.logger = logger;
+	}
+	
+	public String getProjectName() {
+		return projectName;
+	}
+
+	public int getProjectId() {
+		return projectId;
+	}
+
+	protected void setProjectId(int projectId) {
+		this.projectId = projectId;
+	}
+
+	public String getFlowName() {
+		return flowName;
+	}
+
+	protected void setFlowName(String flowName) {
+		this.flowName = flowName;
+	}
+
+	public String getSubmitUser() {
+		return submitUser;
+	}
+
+	protected void setSubmitUser(String submitUser) {
+		this.submitUser = submitUser;
+	}
+
+	public ExecutionOptions getExecutionOptions() {
+		return executionOptions;
+	}
+
+	protected void setExecutionOptions(ExecutionOptions executionOptions) {
+		this.executionOptions = executionOptions;
+	}
+	
+	public List<SlaOption> getSlaOptions() {
+		return slaOptions;
+	}
+
+	protected void setSlaOptions(List<SlaOption> slaOptions) {
+		this.slaOptions = slaOptions;
+	}
+
+	public static ExecutorManagerAdapter getExecutorManager() {
+		return executorManager;
+	}
+ 	
+	public static void setExecutorManager(ExecutorManagerAdapter executorManager) {
+		ExecuteFlowAction.executorManager = executorManager;
+	}
+	
+	public static TriggerManager getTriggerManager() {
+		return triggerManager;
+	}
+ 	
+	public static void setTriggerManager(TriggerManager triggerManager) {
+		ExecuteFlowAction.triggerManager = triggerManager;
+	}
+
+	public static ProjectManager getProjectManager() {
+		return projectManager;
+	}
+	
+	public static void setProjectManager(ProjectManager projectManager) {
+		ExecuteFlowAction.projectManager = projectManager;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@SuppressWarnings("unchecked")
+	@Override
+	public TriggerAction fromJson(Object obj) {
+		return createFromJson((HashMap<String, Object>) obj);
+	}
+
+	@SuppressWarnings("unchecked")
+	public static TriggerAction createFromJson(HashMap<String, Object> obj) {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		String objType = (String) jsonObj.get("type");
+		if(! objType.equals(type)) {
+			throw new RuntimeException("Cannot create action of " + type + " from " + objType);
+		}
+		String actionId = (String) jsonObj.get("actionId");
+		int projectId = Integer.valueOf((String)jsonObj.get("projectId"));
+		String projectName = (String) jsonObj.get("projectName");
+		String flowName = (String) jsonObj.get("flowName");
+		String submitUser = (String) jsonObj.get("submitUser");
+		ExecutionOptions executionOptions = null;
+		if(jsonObj.containsKey("executionOptions")) {
+			executionOptions = ExecutionOptions.createFromObject(jsonObj.get("executionOptions"));
+		}
+		List<SlaOption> slaOptions = null;
+		if(jsonObj.containsKey("slaOptions")) {
+			slaOptions = new ArrayList<SlaOption>();
+			List<Object> slaOptionsObj = (List<Object>) jsonObj.get("slaOptions");
+			for(Object slaObj : slaOptionsObj) {
+				slaOptions.add(SlaOption.fromObject(slaObj));
+			}
+		}
+		return new ExecuteFlowAction(actionId, projectId, projectName, flowName, submitUser, executionOptions, slaOptions);
+	}
+	
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("actionId", actionId);
+		jsonObj.put("type", type);
+		jsonObj.put("projectId", String.valueOf(projectId));
+		jsonObj.put("projectName", projectName);
+		jsonObj.put("flowName", flowName);
+		jsonObj.put("submitUser", submitUser);
+		if(executionOptions != null) {
+			jsonObj.put("executionOptions", executionOptions.toObject());
+		}
+		if(slaOptions != null) {
+			List<Object> slaOptionsObj = new ArrayList<Object>();
+			for(SlaOption sla : slaOptions) {
+				slaOptionsObj.add(sla.toObject());
+			}
+			jsonObj.put("slaOptions", slaOptionsObj);
+		}
+		return jsonObj;
+	}
+
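+	// Builds an ExecutableFlow for the configured project/flow, applies the execution
+	// options (falling back to the flow's default failure/success emails when not
+	// overridden), submits it through the ExecutorManager, and then registers one SLA
+	// trigger per SlaOption against the new execution.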
+	@Override
+	public void doAction() throws Exception {
+		if(projectManager == null || executorManager == null) {
+			throw new Exception("ExecuteFlowAction not properly initialized!");
+		}
+		
+		Project project = projectManager.getProject(projectId);
+		if(project == null) {
+			logger.error("Project to execute " + projectId + " does not exist!");
+			throw new RuntimeException("Error finding the project to execute " + projectId);
+		}
+		
+		Flow flow = project.getFlow(flowName);
+		if(flow == null) {
+			logger.error("Flow " + flowName + " cannot be found in project " + project.getName());
+			throw new RuntimeException("Error finding the flow to execute " + flowName);
+		}
+		
+		ExecutableFlow exflow = new ExecutableFlow(flow);
+		exflow.setSubmitUser(submitUser);
+		exflow.addAllProxyUsers(project.getProxyUsers());
+		
+		if(!executionOptions.isFailureEmailsOverridden()) {
+			executionOptions.setFailureEmails(flow.getFailureEmails());
+		}
+		if(!executionOptions.isSuccessEmailsOverridden()) {
+			executionOptions.setSuccessEmails(flow.getSuccessEmails());
+		}
+		exflow.setExecutionOptions(executionOptions);
+		
+		try{
+			executorManager.submitExecutableFlow(exflow, submitUser);
+//			Map<String, Object> outputProps = new HashMap<String, Object>();
+//			outputProps.put(EXEC_ID, exflow.getExecutionId());
+//			context.put(actionId, outputProps);
+			logger.info("Invoked flow " + project.getName() + "." + flowName);
+		} catch (ExecutorManagerException e) {
+			throw new RuntimeException(e);
+		}
+		
+		// deal with sla
+		if(slaOptions != null && slaOptions.size() > 0) {
+			int execId = exflow.getExecutionId();
+			for(SlaOption sla : slaOptions) {
+				logger.info("Adding sla trigger " + sla.toString() + " to execution " + execId);
+				SlaChecker slaChecker = new SlaChecker("slaChecker", sla, execId);
+				Map<String, ConditionChecker> slaCheckers = new HashMap<String, ConditionChecker>();
+				slaCheckers.put(slaChecker.getId(), slaChecker);
+				Condition triggerCond = new Condition(slaCheckers, slaChecker.getId() + ".eval()");
+				// if whole flow finish before violate sla, just abort
+				// if the whole flow finishes before the SLA is violated, this expire condition retires the SLA trigger
+				Map<String, ConditionChecker> expireCheckers = new HashMap<String, ConditionChecker>();
+				expireCheckers.put(execChecker.getId(), execChecker);
+				Condition expireCond = new Condition(expireCheckers, execChecker.getId() + ".eval()");
+				List<TriggerAction> actions = new ArrayList<TriggerAction>();
+				List<String> slaActions = sla.getActions();
+				for(String act : slaActions) {
+					if(act.equals(SlaOption.ACTION_ALERT)) {
+						SlaAlertAction slaAlert = new SlaAlertAction("slaAlert", sla, execId);
+						actions.add(slaAlert);
+					} else if(act.equals(SlaOption.ACTION_CANCEL_FLOW)) {
+						KillExecutionAction killAct = new KillExecutionAction("killExecution", execId);
+						actions.add(killAct);
+					}
+				}
+				Trigger slaTrigger = new Trigger("azkaban_sla", "azkaban", triggerCond, expireCond, actions);
+				slaTrigger.setResetOnTrigger(false);
+				slaTrigger.setResetOnExpire(false);
+				logger.info("Ready to put in the sla trigger");
+				triggerManager.insertTrigger(slaTrigger);
+			}
+		}
+		
+	}
+
+	@Override
+	public String getDescription() {
+		return "Execute flow " + getFlowName() + 
+				" from project " + getProjectName();
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+	}
+
+	@Override
+	public String getId() {
+		return actionId;
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/ExecutionChecker.java b/src/java/azkaban/trigger/builtin/ExecutionChecker.java
new file mode 100644
index 0000000..2906b5b
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/ExecutionChecker.java
@@ -0,0 +1,123 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutableNode;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.executor.ExecutorManagerException;
+import azkaban.executor.Status;
+import azkaban.trigger.ConditionChecker;
+
+public class ExecutionChecker implements ConditionChecker{
+
+	public static final String type = "ExecutionChecker";
+	public static ExecutorManagerAdapter executorManager;
+	
+	private String checkerId;
+	private int execId;
+	private String jobName;
+	private Status wantedStatus;
+	
+	public ExecutionChecker(String checkerId, int execId, String jobName, Status wantedStatus) {
+		this.checkerId = checkerId;
+		this.execId = execId;
+		this.jobName = jobName;
+		this.wantedStatus = wantedStatus;
+	}
+	
+	public static void setExecutorManager(ExecutorManagerAdapter em) {
+		executorManager = em;
+	}
+	
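+	// Returns TRUE when the watched execution (or, if jobName is set, that job within
+	// it) has reached the wanted status; FALSE if the flow cannot be fetched.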
+	@Override
+	public Object eval() {
+		ExecutableFlow exflow;
+		try {
+			exflow = executorManager.getExecutableFlow(execId);
+		} catch (ExecutorManagerException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+			return Boolean.FALSE;
+		}
+		if(jobName != null) {
+			ExecutableNode job = exflow.getExecutableNode(jobName);
+			if(job != null) {
+				return job.getStatus().equals(wantedStatus);
+			} else {
+				return Boolean.FALSE;
+			}
+		} else {
+			return exflow.getStatus().equals(wantedStatus);
+		}
+		
+	}
+
+	@Override
+	public Object getNum() {
+		return null;
+	}
+
+	@Override
+	public void reset() {
+	}
+
+	@Override
+	public String getId() {
+		return checkerId;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	public static ExecutionChecker createFromJson(HashMap<String, Object> jsonObj) throws Exception {
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
+		}
+		int execId = Integer.valueOf((String) jsonObj.get("execId"));
+		String jobName = null;
+		if(jsonObj.containsKey("jobName")) {
+			jobName = (String) jsonObj.get("jobName");
+		}
+		String checkerId = (String) jsonObj.get("checkerId");
+		Status wantedStatus = Status.valueOf((String)jsonObj.get("wantedStatus"));
+
+		return new ExecutionChecker(checkerId, execId, jobName, wantedStatus);
+	}
+	
+	@SuppressWarnings("unchecked")
+	@Override
+	public ConditionChecker fromJson(Object obj) throws Exception {
+		return createFromJson((HashMap<String, Object>) obj);
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("type", type);
+		jsonObj.put("execId", String.valueOf(execId));
+		if(jobName != null) {
+			jsonObj.put("jobName", jobName);
+		}
+		jsonObj.put("wantedStatus", wantedStatus.toString());
+		jsonObj.put("checkerId", checkerId);
+		return jsonObj;
+	}
+
+	@Override
+	public void stopChecker() {
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+	}
+
+	@Override
+	public long getNextCheckTime() {
+		return -1;
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/KillExecutionAction.java b/src/java/azkaban/trigger/builtin/KillExecutionAction.java
new file mode 100644
index 0000000..74633e9
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/KillExecutionAction.java
@@ -0,0 +1,93 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.trigger.TriggerAction;
+
+public class KillExecutionAction implements TriggerAction{
+
+	public static final String type = "KillExecutionAction";
+	
+	private static final Logger logger = Logger.getLogger(KillExecutionAction.class);
+	
+	private String actionId;
+	private int execId;
+	private static ExecutorManagerAdapter executorManager;
+	
+	public KillExecutionAction(String actionId, int execId) {
+		this.execId = execId;
+		this.actionId = actionId;
+	}
+	
+	public static void setExecutorManager(ExecutorManagerAdapter em) {
+		executorManager = em;
+	}
+	
+	@Override
+	public String getId() {
+		return actionId;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static KillExecutionAction createFromJson(Object obj) {
+		return createFromJson((HashMap<String, Object>)obj);
+	}
+	
+	public static KillExecutionAction createFromJson(HashMap<String, Object> obj) {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		String objType = (String) jsonObj.get("type");
+		if(! objType.equals(type)) {
+			throw new RuntimeException("Cannot create action of " + type + " from " + objType);
+		}
+		String actionId = (String) jsonObj.get("actionId");
+		int execId = Integer.valueOf((String) jsonObj.get("execId"));
+		return new KillExecutionAction(actionId, execId);
+	}
+	
+	@SuppressWarnings("unchecked")
+	@Override
+	public KillExecutionAction fromJson(Object obj) throws Exception {
+		return createFromJson((HashMap<String, Object>)obj);
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("actionId", actionId);
+		jsonObj.put("type", type);
+		jsonObj.put("execId", String.valueOf(execId));
+		return jsonObj;
+	}
+
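+	// Cancels the execution on behalf of "azkaban_sla" unless it has already finished.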
+	@Override
+	public void doAction() throws Exception {
+		ExecutableFlow exFlow = executorManager.getExecutableFlow(execId);
+		logger.info("ready to kill execution " + execId);
+		if(!ExecutableFlow.isFinished(exFlow)) {
+			logger.info("Killing execution " + execId);
+			executorManager.cancelFlow(exFlow, "azkaban_sla");
+		}
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public String getDescription() {
+		return type + " for " + execId;
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/SendEmailAction.java b/src/java/azkaban/trigger/builtin/SendEmailAction.java
new file mode 100644
index 0000000..c1b6efc
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/SendEmailAction.java
@@ -0,0 +1,91 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import azkaban.trigger.TriggerAction;
+import azkaban.utils.AbstractMailer;
+import azkaban.utils.EmailMessage;
+import azkaban.utils.Props;
+
+public class SendEmailAction implements TriggerAction {
+	
+	private String actionId;
+	private static AbstractMailer mailer;
+	private String message;
+	public static final String type = "SendEmailAction";
+	private String mimetype = "text/html";
+	private List<String> emailList;
+	private String subject;
+	
+	public static void init(Props props) {
+		mailer = new AbstractMailer(props);
+	}
+	
+	public SendEmailAction(String actionId, String subject, String message, List<String> emailList) {
+		this.actionId = actionId;
+		this.message = message;
+		this.subject = subject;
+		this.emailList = emailList;
+	}
+	
+	@Override
+	public String getId() {
+		return actionId;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static SendEmailAction createFromJson(Object obj) throws Exception {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
+		}
+		String actionId = (String) jsonObj.get("actionId");
+		String subject = (String) jsonObj.get("subject");
+		String message = (String) jsonObj.get("message");
+		List<String> emailList = (List<String>) jsonObj.get("emailList");
+		return new SendEmailAction(actionId, subject, message, emailList);
+	}
+	
+	@Override
+	public TriggerAction fromJson(Object obj) throws Exception {
+		return createFromJson(obj);
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("actionId", actionId);
+		jsonObj.put("type", type);
+		jsonObj.put("subject", subject);
+		jsonObj.put("message", message);
+		jsonObj.put("emailList", emailList);
+
+		return jsonObj;
+	}
+
+	@Override
+	public void doAction() throws Exception {
+		EmailMessage email = mailer.prepareEmailMessage(subject, mimetype, emailList);
+		email.setBody(message);
+		email.sendEmail();
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+		
+	}
+
+	@Override
+	public String getDescription() {
+		return type;
+	}
+
+	
+}
diff --git a/src/java/azkaban/trigger/builtin/SlaAlertAction.java b/src/java/azkaban/trigger/builtin/SlaAlertAction.java
new file mode 100644
index 0000000..ec8dd43
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/SlaAlertAction.java
@@ -0,0 +1,156 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import azkaban.alert.Alerter;
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.sla.SlaOption;
+import azkaban.trigger.TriggerAction;
+
+public class SlaAlertAction implements TriggerAction{
+
+	public static final String type = "AlertAction";
+	
+	private static final Logger logger = Logger.getLogger(SlaAlertAction.class);
+	
+	private String actionId;
+	private SlaOption slaOption;
+	private int execId;
+//	private List<Map<String, Object>> alerts;
+	private static Map<String, azkaban.alert.Alerter> alerters;
+	private static ExecutorManagerAdapter executorManager;
+
+	public SlaAlertAction(String id, SlaOption slaOption, int execId) {
+		this.actionId = id;
+		this.slaOption = slaOption;
+		this.execId = execId;
+//		this.alerts = alerts;
+	}
+	
+	public static void setAlerters(Map<String, Alerter> alts) {
+		alerters = alts;
+	}
+	
+	public static void setExecutorManager(ExecutorManagerAdapter em) {
+		executorManager = em;
+	}
+	
+	@Override
+	public String getId() {
+		return actionId;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static SlaAlertAction createFromJson(Object obj) throws Exception {
+		return createFromJson((HashMap<String, Object>) obj);
+	}
+	
+	public static SlaAlertAction createFromJson(HashMap<String, Object> obj) throws Exception {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create action of " + type + " from " + jsonObj.get("type"));
+		}
+		String actionId = (String) jsonObj.get("actionId");
+		SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+		int execId = Integer.valueOf((String) jsonObj.get("execId"));
+//		List<Map<String, Object>> alerts = (List<Map<String, Object>>) jsonObj.get("alerts");
+		return new SlaAlertAction(actionId, slaOption, execId);
+	}
+	
+	@Override
+	public TriggerAction fromJson(Object obj) throws Exception {
+		return createFromJson(obj);
+	}
+
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("actionId", actionId);
+		jsonObj.put("type", type);
+		jsonObj.put("slaOption", slaOption.toObject());
+		jsonObj.put("execId", String.valueOf(execId));
+//		jsonObj.put("alerts", alerts);
+
+		return jsonObj;
+	}
+
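+	// Looks up the Alerter registered for the SLA option's alert type and asks it to
+	// send the generated SLA message for this execution.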
+	@Override
+	public void doAction() throws Exception {
+//		for(Map<String, Object> alert : alerts) {
+		logger.info("Alerting on sla failure.");
+		Map<String, Object> alert = slaOption.getInfo();
+		if(alert.containsKey(SlaOption.ALERT_TYPE)) {
+			String alertType = (String) alert.get(SlaOption.ALERT_TYPE);
+			Alerter alerter = alerters.get(alertType);
+			if(alerter != null) {
+				try {
+					ExecutableFlow flow = executorManager.getExecutableFlow(execId);
+					alerter.alertOnSla(slaOption, SlaOption.createSlaMessage(slaOption, flow));
+				} catch (Exception e) {
+					logger.error("Failed to alert by " + alertType, e);
+				}
+			} else {
+				logger.error("Alerter type " + alertType + " doesn't exist. Failed to alert.");
+			}
+		}
+//		}
+	}
+
+//	private String createSlaMessage() {
+//		ExecutableFlow flow = null;
+//		try {
+//			flow = executorManager.getExecutableFlow(execId);
+//		} catch (ExecutorManagerException e) {
+//			e.printStackTrace();
+//			logger.error("Failed to get executable flow.");
+//		}
+//		String type = slaOption.getType();
+//		if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+//			String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+//			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+//			String basicinfo =  "SLA Alert: Your flow " + flowName + " failed to FINISH within " + duration + "</br>";
+//			String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " is expected to FINISH within " + duration + " from " + flow.getStartTime() + "</br>"; 
+//			String actual = "Actual flow status is " + flow.getStatus();
+//			return basicinfo + expected + actual;
+//		} else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+//			String flowName = (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+//			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+//			String basicinfo =  "SLA Alert: Your flow " + flowName + " failed to SUCCEED within " + duration + "</br>";
+//			String expected = "Here is details : </br>" + "Flow " + flowName + " in execution " + execId + " expected to FINISH within " + duration + " from " + flow.getStartTime() + "</br>"; 
+//			String actual = "Actual flow status is " + flow.getStatus();
+//			return basicinfo + expected + actual;
+//		} else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
+//			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+//			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+//			return "SLA Alert: Your job " + jobName + " failed to FINISH within " + duration + " in execution " + execId;
+//		} else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+//			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+//			String duration = (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+//			return "SLA Alert: Your job " + jobName + " failed to SUCCEED within " + duration + " in execution " + execId;
+//		} else {
+//			return "Unrecognized SLA type " + type;
+//		}
+//	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+	}
+
+	@Override
+	public String getDescription() {
+		return type + " with " + slaOption.toString();
+	}
+
+}
diff --git a/src/java/azkaban/trigger/builtin/SlaChecker.java b/src/java/azkaban/trigger/builtin/SlaChecker.java
new file mode 100644
index 0000000..dedc23f
--- /dev/null
+++ b/src/java/azkaban/trigger/builtin/SlaChecker.java
@@ -0,0 +1,211 @@
+package azkaban.trigger.builtin;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+import org.joda.time.ReadablePeriod;
+
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutableNode;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.executor.ExecutorManagerException;
+import azkaban.executor.Status;
+import azkaban.sla.SlaOption;
+import azkaban.trigger.ConditionChecker;
+import azkaban.utils.Utils;
+
+public class SlaChecker implements ConditionChecker{
+
+	private static final Logger logger = Logger.getLogger(SlaChecker.class);
+	public static final String type = "SlaChecker";
+	
+	private String id;
+	private SlaOption slaOption;
+	private int execId;
+	private long checkTime = -1;
+	
+	private static ExecutorManagerAdapter executorManager;
+	
+	public SlaChecker(String id, SlaOption slaOption, int execId) {
+		this.id = id;
+		this.slaOption = slaOption;
+		this.execId = execId;
+	}
+
+	public static void setExecutorManager(ExecutorManagerAdapter em) {
+		executorManager = em;
+	}
+	
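+	// Computes the SLA deadline (flow/job start time plus the configured duration) and
+	// returns TRUE only when that deadline has passed without the flow or job reaching
+	// the state required by the SLA type.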
+	private Boolean violateSla(ExecutableFlow flow) {
+		String type = slaOption.getType();
+		logger.info("Checking for " + flow.getExecutionId() + " with sla " + type);
+		logger.info("flow is " + flow.getStatus());
+		if(flow.getStartTime() < 0) {
+			return Boolean.FALSE;
+		}
+		if(type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+			ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
+			DateTime startTime = new DateTime(flow.getStartTime());
+			DateTime checkTime = startTime.plus(duration);
+			this.checkTime = checkTime.getMillis();
+			if(checkTime.isBeforeNow()) {
+				Status status = flow.getStatus();
+				if(status.equals(Status.FAILED) || status.equals(Status.KILLED) || status.equals(Status.SUCCEEDED)) {
+					return Boolean.FALSE;
+				} else {
+					return Boolean.TRUE;
+				}
+			}
+		} else if(type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+			ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
+			DateTime startTime = new DateTime(flow.getStartTime());
+			DateTime checkTime = startTime.plus(duration);
+			this.checkTime = checkTime.getMillis();
+			if(checkTime.isBeforeNow()) {
+				Status status = flow.getStatus();
+				if(status.equals(Status.SUCCEEDED)) {
+					return Boolean.FALSE;
+				} else {
+					return Boolean.TRUE;
+				}
+			}
+		} else if(type.equals(SlaOption.TYPE_JOB_FINISH)) {
+			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME); 
+			ExecutableNode node = flow.getExecutableNode(jobName);
+			if(node.getStartTime() > 0) {
+				ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
+				DateTime startTime = new DateTime(node.getStartTime());
+				DateTime checkTime = startTime.plus(duration);
+				this.checkTime = checkTime.getMillis();
+				if(checkTime.isBeforeNow()) {
+					Status status = node.getStatus();
+					if(status.equals(Status.FAILED) || status.equals(Status.KILLED) || status.equals(Status.SUCCEEDED)) {
+						return Boolean.FALSE;
+					} else {
+						return Boolean.TRUE;
+					}
+				}
+			}
+		} else if(type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME); 
+			ExecutableNode node = flow.getExecutableNode(jobName);
+			if(node.getStartTime() > 0) {
+				ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
+				DateTime startTime = new DateTime(node.getStartTime());
+				DateTime checkTime = startTime.plus(duration);
+				this.checkTime = checkTime.getMillis();
+				if(checkTime.isBeforeNow()) {
+					Status status = node.getStatus();
+					if(status.equals(Status.SUCCEEDED)) {
+						return Boolean.FALSE;
+					} else {
+						return Boolean.TRUE;
+					}
+				}
+			}
+		} 
+//		else if(type.equals(SlaOption.TYPE_JOB_PROGRESS)) {
+//			String jobName = (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME); 
+//			float targetProgress = Float.valueOf((String) slaOption.getInfo().get(SlaOption.INFO_PROGRESS_PERCENT));
+//			ExecutableNode node = flow.getExecutableNode(jobName);
+//			if(node.getStartTime() > 0) {
+//				ReadablePeriod duration = Utils.parsePeriodString((String) slaOption.getInfo().get(SlaOption.INFO_DURATION));
+//				DateTime startTime = new DateTime(node.getStartTime());
+//				DateTime checkTime = startTime.plus(duration);
+//				if(checkTime.isBeforeNow()) {
+//					if(node.getProgress() > targetProgress) {
+//						return Boolean.FALSE;
+//					} else {
+//						return Boolean.TRUE;
+//					}
+//				}
+//			} else {
+//				return Boolean.FALSE;
+//			}
+//		}
+		return Boolean.FALSE;
+	}
+	
+	// return true to trigger sla action
+	@Override
+	public Object eval() {
+		ExecutableFlow flow;
+		try {
+			flow = executorManager.getExecutableFlow(execId);
+		} catch (ExecutorManagerException e) {
+			logger.error("Can't get executable flow.", e);
+			e.printStackTrace();
+			// something wrong, send out alerts
+			return Boolean.TRUE;
+		}
+		return violateSla(flow);
+	}
+
+	@Override
+	public Object getNum() {
+		return null;
+	}
+
+	@Override
+	public void reset() {
+	}
+
+	@Override
+	public String getId() {
+		return id;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@Override
+	public ConditionChecker fromJson(Object obj) throws Exception {
+		return createFromJson(obj);
+	}
+
+	@SuppressWarnings("unchecked")
+	public static SlaChecker createFromJson(Object obj) throws Exception {
+		return createFromJson((HashMap<String, Object>)obj);
+	}
+	
+	public static SlaChecker createFromJson(HashMap<String, Object> obj) throws Exception {
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		if(!jsonObj.get("type").equals(type)) {
+			throw new Exception("Cannot create checker of " + type + " from " + jsonObj.get("type"));
+		}
+		String id = (String) jsonObj.get("id");
+		SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+		int execId = Integer.valueOf((String) jsonObj.get("execId"));
+		return new SlaChecker(id, slaOption, execId);
+	}
+	
+	@Override
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("type", type);
+		jsonObj.put("id", id);
+		jsonObj.put("slaOption", slaOption.toObject());
+		jsonObj.put("execId", String.valueOf(execId));
+	
+		return jsonObj;
+	}
+
+	@Override
+	public void stopChecker() {
+		
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+	}
+
+	@Override
+	public long getNextCheckTime() {
+		return checkTime;
+	}
+
+}
diff --git a/src/java/azkaban/trigger/CheckerTypeLoader.java b/src/java/azkaban/trigger/CheckerTypeLoader.java
new file mode 100644
index 0000000..bc5c06a
--- /dev/null
+++ b/src/java/azkaban/trigger/CheckerTypeLoader.java
@@ -0,0 +1,174 @@
+package azkaban.trigger;
+
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
+
+
+public class CheckerTypeLoader {
+	
+	private static Logger logger = Logger.getLogger(CheckerTypeLoader.class);
+	
+	public static final String DEFAULT_CONDITION_CHECKER_PLUGIN_DIR = "plugins/conditioncheckers";
+	private static final String CHECKERTYPECONFFILE = "plugin.properties"; // need jars.to.include property, will be loaded with user property
+	private static final String COMMONCONFFILE = "common.properties";	// common properties for multiple plugins
+	
+	protected static Map<String, Class<? extends ConditionChecker>> checkerToClass = new HashMap<String, Class<? extends ConditionChecker>>();
+	
+	public void init(Props props) throws TriggerException {
+		
+		
+		// load built-in checkers
+//		
+//		loadBuiltinCheckers();
+//		
+//		loadPluginCheckers(props);
+
+	}
+	
+	public synchronized void registerCheckerType(String type, Class<? extends ConditionChecker> checkerClass) {
+		logger.info("Registering checker " + type);
+		if(!checkerToClass.containsKey(type)) {
+			checkerToClass.put(type, checkerClass);
+		}
+	}
+	
+//	private void loadPluginCheckers(Props props) throws TriggerException {
+//		
+//		String checkerDir = props.getString("azkaban.condition.checker.plugin.dir", DEFAULT_CONDITION_CHECKER_PLUGIN_DIR);
+//		File pluginDir = new File(checkerDir);
+//		if(!pluginDir.exists() || !pluginDir.isDirectory() || !pluginDir.canRead()) {
+//			logger.info("No condition checker plugins to load.");
+//			return;
+//		}
+//		
+//		logger.info("Loading plugin condition checkers from " + pluginDir);
+//		ClassLoader parentCl = this.getClass().getClassLoader();
+//		
+//		Props globalCheckerConf = null;
+//		File confFile = Utils.findFilefromDir(pluginDir, COMMONCONFFILE);
+//		try {
+//			if(confFile != null) {
+//				globalCheckerConf = new Props(null, confFile);
+//			} else {
+//				globalCheckerConf = new Props();
+//			}
+//		} catch (IOException e) {
+//			throw new TriggerException("Failed to get global properties." + e);
+//		}
+//		
+//		for(File dir : pluginDir.listFiles()) {
+//			if(dir.isDirectory() && dir.canRead()) {
+//				try {
+//					loadPluginTypes(globalCheckerConf, pluginDir, parentCl);
+//				} catch (Exception e) {
+//					logger.info("Plugin checkers failed to load. " + e.getCause());
+//					throw new TriggerException("Failed to load all condition checkers!", e);
+//				}
+//			}
+//		}
+//	}
+//	
+//	@SuppressWarnings("unchecked")
+//	private void loadPluginTypes(Props globalConf, File dir, ClassLoader parentCl) throws TriggerException {
+//		Props checkerConf = null;
+//		File confFile = Utils.findFilefromDir(dir, CHECKERTYPECONFFILE);
+//		if(confFile == null) {
+//			logger.info("No checker type found in " + dir.getAbsolutePath());
+//			return;
+//		}
+//		try {
+//			checkerConf = new Props(globalConf, confFile);
+//		} catch (IOException e) {
+//			throw new TriggerException("Failed to load config for the checker type", e);
+//		}
+//		
+//		String checkerName = dir.getName();
+//		String checkerClass = checkerConf.getString("checker.class");
+//		
+//		List<URL> resources = new ArrayList<URL>();		
+//		for(File f : dir.listFiles()) {
+//			try {
+//				if(f.getName().endsWith(".jar")) {
+//					resources.add(f.toURI().toURL());
+//					logger.info("adding to classpath " + f.toURI().toURL());
+//				}
+//			} catch (MalformedURLException e) {
+//				// TODO Auto-generated catch block
+//				throw new TriggerException(e);
+//			}
+//		}
+//		
+//		// each job type can have a different class loader
+//		ClassLoader checkerCl = new URLClassLoader(resources.toArray(new URL[resources.size()]), parentCl);
+//		
+//		Class<? extends ConditionChecker> clazz = null;
+//		try {
+//			clazz = (Class<? extends ConditionChecker>)checkerCl.loadClass(checkerClass);
+//			checkerToClass.put(checkerName, clazz);
+//		}
+//		catch (ClassNotFoundException e) {
+//			throw new TriggerException(e);
+//		}
+//		
+//		if(checkerConf.getBoolean("need.init")) {
+//			try {
+//				Utils.invokeStaticMethod(checkerCl, checkerClass, "init", checkerConf);
+//			} catch (Exception e) {
+//				e.printStackTrace();
+//				logger.error("Failed to init the checker type " + checkerName);
+//				throw new TriggerException(e);
+//			}
+//		}
+//		
+//		logger.info("Loaded checker type " + checkerName + " " + checkerClass);
+//	}
+	
+	public static void registerBuiltinCheckers(Map<String, Class<? extends ConditionChecker>> builtinCheckers) {
+		checkerToClass.putAll(builtinCheckers);
+		for(String type : builtinCheckers.keySet()) {
+			logger.info("Loaded " + type + " checker.");
+		}
+	}
+	
+//	private void loadBuiltinCheckers() {
+//		checkerToClass.put("BasicTimeChecker", BasicTimeChecker.class);
+//		logger.info("Loaded BasicTimeChecker type.");
+//	}
+	
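+	// Instantiates a checker by reflectively invoking the registered class's static
+	// createFromJson method.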
+	public ConditionChecker createCheckerFromJson(String type, Object obj) throws Exception {
+		ConditionChecker checker = null;
+		Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);	
+		if(checkerClass == null) {
+			throw new Exception("Checker type " + type + " not supported!");
+		}
+		checker = (ConditionChecker) Utils.invokeStaticMethod(checkerClass.getClassLoader(), checkerClass.getName(), "createFromJson", obj);
+		
+		return checker;
+	}
+	
+	public ConditionChecker createChecker(String type, Object ... args) {
+		ConditionChecker checker = null;
+		Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);		
+		checker = (ConditionChecker) Utils.callConstructor(checkerClass, args);
+		
+		return checker;
+	}
+	
+	public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
+		return checkerToClass;
+	}
+	
+}
diff --git a/src/java/azkaban/trigger/Condition.java b/src/java/azkaban/trigger/Condition.java
new file mode 100644
index 0000000..2f355d9
--- /dev/null
+++ b/src/java/azkaban/trigger/Condition.java
@@ -0,0 +1,161 @@
+package azkaban.trigger;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.jexl2.Expression;
+import org.apache.commons.jexl2.JexlEngine;
+import org.apache.commons.jexl2.MapContext;
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+
+
+public class Condition {
+	
+	private static Logger logger = Logger.getLogger(Condition.class);
+	
+	private static JexlEngine jexl = new JexlEngine();
+	private static CheckerTypeLoader checkerLoader = null;
+	private Expression expression;
+	private Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+	private MapContext context = new MapContext();
+	private Long nextCheckTime = -1L;	
+	
+	public Condition(Map<String, ConditionChecker> checkers, String expr) {
+		setCheckers(checkers);
+		this.expression = jexl.createExpression(expr);
+		updateNextCheckTime();
+	}
+	
+	public Condition(Map<String, ConditionChecker> checkers, String expr, long nextCheckTime) {
+		this.nextCheckTime = nextCheckTime;
+		setCheckers(checkers);
+//		for(ConditionChecker ck : checkers.values()) {
+//			ck.setCondition(this);
+//		}
+		this.expression = jexl.createExpression(expr);
+	}
+	
+	public synchronized static void setJexlEngine(JexlEngine jexl) {
+		Condition.jexl = jexl;
+	}
+	
+	public synchronized static void setCheckerLoader(CheckerTypeLoader loader) {
+		Condition.checkerLoader = loader;
+	}
+	
+	protected static CheckerTypeLoader getCheckerLoader() {
+		return checkerLoader;
+	}
+	
+	protected void registerChecker(ConditionChecker checker) {
+		checkers.put(checker.getId(), checker);
+		context.set(checker.getId(), checker);
+		updateNextCheckTime();
+	}
+	
+	public long getNextCheckTime() {
+		return nextCheckTime;
+	}
+	
+	public Map<String, ConditionChecker> getCheckers() {
+		return this.checkers;
+	}
+	
+	public void setCheckers(Map<String, ConditionChecker> checkers){
+		this.checkers = checkers;
+		for(ConditionChecker checker : checkers.values()) {
+			this.context.set(checker.getId(), checker);
+//			checker.setCondition(this);
+		}
+		updateNextCheckTime();
+	}
+	
+	public void updateCheckTime(Long ct) {
+		if(nextCheckTime < ct) {
+			nextCheckTime = ct;
+		}
+	}
+	
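+	// The condition needs re-evaluation as soon as any of its checkers does, so the
+	// next check time is the minimum across all registered checkers.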
+	private void updateNextCheckTime() {
+		long time = Long.MAX_VALUE;
+		for(ConditionChecker checker : checkers.values()) {
+			time = Math.min(time, checker.getNextCheckTime());
+		}
+		this.nextCheckTime = time;
+	}
+	
+	public void resetCheckers() {
+		for(ConditionChecker checker : checkers.values()) {
+			checker.reset();
+		}
+		updateNextCheckTime();
+		logger.info("Done resetting checkers. The next check time will be " + new DateTime(nextCheckTime));
+	}
+	
+	public String getExpression() {
+		return this.expression.getExpression();
+	}
+	
+	public void setExpression(String expr) {
+		this.expression = jexl.createExpression(expr);
+	}
+	
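+	// Evaluates the JEXL expression against the registered checkers and reports
+	// whether the condition currently holds.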
+	public boolean isMet() {
+		logger.info("Testing condition " + expression);
+		return expression.evaluate(context).equals(Boolean.TRUE);
+	}
+	
+	public Object toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("expression", expression.getExpression());
+		
+		List<Object> checkersJson = new ArrayList<Object>();
+		for(ConditionChecker checker : checkers.values()) {
+			Map<String, Object> oneChecker = new HashMap<String, Object>();
+			oneChecker.put("type", checker.getType());
+			oneChecker.put("checkerJson", checker.toJson());
+			checkersJson.add(oneChecker);
+		}
+		jsonObj.put("checkers", checkersJson);
+		jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
+		
+		return jsonObj;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static Condition fromJson(Object obj) throws Exception {
+		if(checkerLoader == null) {
+			throw new Exception("Condition Checker loader not initialized!");
+		}
+		
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		Condition cond = null;
+		
+		try {
+			Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+			List<Object> checkersJson = (List<Object>) jsonObj.get("checkers");			
+			for(Object oneCheckerJson : checkersJson) {
+				Map<String, Object> oneChecker = (HashMap<String, Object>) oneCheckerJson;
+				String type = (String) oneChecker.get("type");
+				ConditionChecker ck = checkerLoader.createCheckerFromJson(type, oneChecker.get("checkerJson"));
+				checkers.put(ck.getId(), ck);
+			}
+			String expr = (String) jsonObj.get("expression");
+			Long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+				
+			cond = new Condition(checkers, expr, nextCheckTime);
+			
+		} catch(Exception e) {
+			e.printStackTrace();
+			logger.error("Failed to recreate condition from json.", e);
+			throw new Exception("Failed to recreate condition from json.", e);
+		}
+		
+		return cond;
+	}
+	
+
+}
diff --git a/src/java/azkaban/trigger/ConditionChecker.java b/src/java/azkaban/trigger/ConditionChecker.java
new file mode 100644
index 0000000..c78267e
--- /dev/null
+++ b/src/java/azkaban/trigger/ConditionChecker.java
@@ -0,0 +1,31 @@
+package azkaban.trigger;
+
+import java.util.Map;
+
+
+public interface ConditionChecker {
+	
+	Object eval();
+	
+	Object getNum();
+	
+	void reset();
+
+	String getId();
+	
+	String getType();
+	
+	ConditionChecker fromJson(Object obj) throws Exception;
+	
+	Object toJson();
+
+	void stopChecker();
+	
+	void setContext(Map<String, Object> context);
+	
+	long getNextCheckTime();
+	
+//	void setCondition(Condition c);
+//	
+//	String getDescription();
+}
diff --git a/src/java/azkaban/trigger/JdbcTriggerLoader.java b/src/java/azkaban/trigger/JdbcTriggerLoader.java
new file mode 100644
index 0000000..2d70c65
--- /dev/null
+++ b/src/java/azkaban/trigger/JdbcTriggerLoader.java
@@ -0,0 +1,347 @@
+package azkaban.trigger;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.dbutils.DbUtils;
+import org.apache.commons.dbutils.QueryRunner;
+import org.apache.commons.dbutils.ResultSetHandler;
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+
+import azkaban.database.AbstractJdbcLoader;
+import azkaban.utils.GZIPUtils;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
+
+/*
+ * Copyright 2012 LinkedIn, Inc
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+public class JdbcTriggerLoader extends AbstractJdbcLoader implements TriggerLoader {
+	private static Logger logger = Logger.getLogger(JdbcTriggerLoader.class);
+
+	private EncodingType defaultEncodingType = EncodingType.GZIP;
+	
+	private static final String triggerTblName = "triggers";
+
+	private static final String GET_UPDATED_TRIGGERS = 
+			"SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName + " WHERE modify_time>=?";
+
+	private static String GET_ALL_TRIGGERS =
+			"SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName;
+	
+	private static String GET_TRIGGER = 
+			"SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + triggerTblName + " WHERE trigger_id=?";
+	
+	private static String ADD_TRIGGER = 
+			"INSERT INTO " + triggerTblName + " ( modify_time) values (?)";
+	
+	private static String REMOVE_TRIGGER = 
+			"DELETE FROM " + triggerTblName + " WHERE trigger_id=?";
+	
+	private static String UPDATE_TRIGGER = 
+			"UPDATE " + triggerTblName + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
+	
+	public EncodingType getDefaultEncodingType() {
+		return defaultEncodingType;
+	}
+
+	public void setDefaultEncodingType(EncodingType defaultEncodingType) {
+		this.defaultEncodingType = defaultEncodingType;
+	}
+	
+	public JdbcTriggerLoader(Props props) {
+		super(props);
+	}
+
+	@Override
+	public List<Trigger> getUpdatedTriggers(long lastUpdateTime) throws TriggerLoaderException {
+		logger.info("Loading triggers changed since " + new DateTime(lastUpdateTime).toString());
+		Connection connection = getConnection();
+
+		QueryRunner runner = new QueryRunner();
+		ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+	
+		List<Trigger> triggers;
+		
+		try {
+			triggers = runner.query(connection, GET_UPDATED_TRIGGERS, handler, lastUpdateTime);
+		} catch (SQLException e) {
+			logger.error(GET_ALL_TRIGGERS + " failed.");
+
+			throw new TriggerLoaderException("Loading triggers from db failed. ", e);
+		} finally {
+			DbUtils.closeQuietly(connection);
+		}
+		
+		logger.info("Loaded " + triggers.size() + " triggers.");
+		
+		return triggers;
+	}
+	
+	@Override
+	public List<Trigger> loadTriggers() throws TriggerLoaderException {
+		logger.info("Loading all triggers from db.");
+		Connection connection = getConnection();
+
+		QueryRunner runner = new QueryRunner();
+		ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+	
+		List<Trigger> triggers;
+		
+		try {
+			triggers = runner.query(connection, GET_ALL_TRIGGERS, handler);
+		} catch (SQLException e) {
+			logger.error(GET_ALL_TRIGGERS + " failed.");
+
+			throw new TriggerLoaderException("Loading triggers from db failed. ", e);
+		} finally {
+			DbUtils.closeQuietly(connection);
+		}
+		
+		logger.info("Loaded " + triggers.size() + " triggers.");
+		
+		return triggers;
+	}
+
+	@Override
+	public void removeTrigger(Trigger t) throws TriggerLoaderException {		
+		logger.info("Removing trigger " + t.toString() + " from db.");
+
+		QueryRunner runner = createQueryRunner();
+		try {
+			int removes =  runner.update(REMOVE_TRIGGER, t.getTriggerId());
+			if (removes == 0) {
+				throw new TriggerLoaderException("No trigger has been removed.");
+			}
+		} catch (SQLException e) {
+			logger.error(REMOVE_TRIGGER + " failed.");
+			throw new TriggerLoaderException("Remove trigger " + t.toString() + " from db failed. ", e);
+		}
+	}
+	
+	@Override
+	public void addTrigger(Trigger t) throws TriggerLoaderException {
+		logger.info("Inserting trigger " + t.toString() + " into db.");
+		t.setLastModifyTime(System.currentTimeMillis());
+		Connection connection = getConnection();
+		try {
+			addTrigger(connection, t, defaultEncodingType);
+		}
+		catch (Exception e) {
+			throw new TriggerLoaderException("Error uploading trigger", e);
+		}
+		finally {
+			DbUtils.closeQuietly(connection);
+		}
+	}
+
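+	// Inserts a placeholder row to obtain an auto-increment trigger id, then writes
+	// the serialized trigger back via updateTrigger.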
+	private synchronized void addTrigger(Connection connection, Trigger t, EncodingType encType) throws TriggerLoaderException {
+		
+		QueryRunner runner = new QueryRunner();
+		
+		long id;
+		
+		try {
+			runner.update(connection, ADD_TRIGGER, DateTime.now().getMillis());
+			connection.commit();
+			id = runner.query(connection, LastInsertID.LAST_INSERT_ID, new LastInsertID());
+
+			if (id == -1l) {
+				logger.error("trigger id is not properly created.");
+				throw new TriggerLoaderException("trigger id is not properly created.");
+			}
+			
+			t.setTriggerId((int)id);
+			updateTrigger(t);
+			logger.info("uploaded trigger " + t.getDescription());
+		} catch (SQLException e) {
+			throw new TriggerLoaderException("Error creating trigger.", e);
+		}
+		
+	}
+	
+	@Override
+	public void updateTrigger(Trigger t) throws TriggerLoaderException {
+		logger.info("Updating trigger " + t.getTriggerId() + " into db.");
+		t.setLastModifyTime(System.currentTimeMillis());
+		Connection connection = getConnection();
+		try{
+			updateTrigger(connection, t, defaultEncodingType);
+		}
+		catch(Exception e) {
+			e.printStackTrace();
+			throw new TriggerLoaderException("Failed to update trigger " + t.toString() + " into db!");
+		}
+		finally {
+			DbUtils.closeQuietly(connection);
+		}
+	}
+		
+	private void updateTrigger(Connection connection, Trigger t, EncodingType encType) throws TriggerLoaderException {
+
+		String json = JSONUtils.toJSON(t.toJson());
+		byte[] data = null;
+		try {
+			byte[] stringData = json.getBytes("UTF-8");
+			data = stringData;
+	
+			if (encType == EncodingType.GZIP) {
+				data = GZIPUtils.gzipBytes(stringData);
+			}
+			logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:"+ data.length);
+		}
+		catch (IOException e) {
+			throw new TriggerLoaderException("Error encoding the trigger " + t.toString());
+		}
+		
+		QueryRunner runner = new QueryRunner();
+	
+		try {
+			int updates =  runner.update( connection, 
+					UPDATE_TRIGGER, 
+					t.getSource(),
+					t.getLastModifyTime(),
+					encType.getNumVal(),
+					data,
+					t.getTriggerId());
+			connection.commit();
+			if (updates == 0) {
+				throw new TriggerLoaderException("No trigger has been updated.");
+				//logger.error("No trigger is updated!");
+			} else {
+				logger.info("Updated " + updates + " records.");
+			}
+		} catch (SQLException e) {
+			logger.error(UPDATE_TRIGGER + " failed.");
+			throw new TriggerLoaderException("Update trigger " + t.toString() + " into db failed. ", e);
+		}
+	}
+	
+	private static class LastInsertID implements ResultSetHandler<Long> {
+		private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
+		
+		@Override
+		public Long handle(ResultSet rs) throws SQLException {
+			if (!rs.next()) {
+				return -1l;
+			}
+
+			long id = rs.getLong(1);
+			return id;
+		}
+		
+	}
+	
+	public class TriggerResultHandler implements ResultSetHandler<List<Trigger>> {
+		
+		@Override
+		public List<Trigger> handle(ResultSet rs) throws SQLException {
+			if (!rs.next()) {
+				return Collections.<Trigger>emptyList();
+			}
+			
+			ArrayList<Trigger> triggers = new ArrayList<Trigger>();
+			do {
+				int triggerId = rs.getInt(1);
+//				String triggerSource = rs.getString(2);
+//				long modifyTime = rs.getLong(3);
+				int encodingType = rs.getInt(4);
+				byte[] data = rs.getBytes(5);
+				
+				Object jsonObj = null;
+				if (data != null) {
+					EncodingType encType = EncodingType.fromInteger(encodingType);
+
+					try {
+						// Convoluted way to inflate strings. Should find common package or helper function.
+						if (encType == EncodingType.GZIP) {
+							// Decompress the sucker.
+							String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+							jsonObj = JSONUtils.parseJSONFromString(jsonString);
+						}
+						else {
+							String jsonString = new String(data, "UTF-8");
+							jsonObj = JSONUtils.parseJSONFromString(jsonString);
+						}	
+					} catch (IOException e) {
+						throw new SQLException("Error reconstructing trigger data " );
+					}
+				}
+				
+				Trigger t = null;
+				try {
+					t = Trigger.fromJson(jsonObj);
+					triggers.add(t);
+				} catch (Exception e) {
+					// TODO Auto-generated catch block
+					e.printStackTrace();
+					logger.error("Failed to load trigger " + triggerId);
+				}
+			} while (rs.next());
+			
+			return triggers;
+		}
+		
+	}
+	
+	private Connection getConnection() throws TriggerLoaderException {
+		Connection connection = null;
+		try {
+			connection = super.getDBConnection(false);
+		} catch (Exception e) {
+			DbUtils.closeQuietly(connection);
+			throw new TriggerLoaderException("Error getting DB connection.", e);
+		}
+		
+		return connection;
+	}
+
+	@Override
+	public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
+		logger.info("Loading trigger " + triggerId + " from db.");
+		Connection connection = getConnection();
+
+		QueryRunner runner = new QueryRunner();
+		ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+	
+		List<Trigger> triggers;
+		
+		try {
+			triggers = runner.query(connection, GET_TRIGGER, handler, triggerId);
+		} catch (SQLException e) {
+			logger.error(GET_TRIGGER + " failed.");
+			throw new TriggerLoaderException("Loading trigger from db failed. ", e);
+		} finally {
+			DbUtils.closeQuietly(connection);
+		}
+		
+		if(triggers.size() == 0) {
+			logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
+			throw new TriggerLoaderException("Loaded 0 triggers. Failed to load trigger " + triggerId);
+		}
+		
+		return triggers.get(0);
+	}
+
+	
+
+}
diff --git a/src/java/azkaban/trigger/Trigger.java b/src/java/azkaban/trigger/Trigger.java
new file mode 100644
index 0000000..60e58f8
--- /dev/null
+++ b/src/java/azkaban/trigger/Trigger.java
@@ -0,0 +1,456 @@
+package azkaban.trigger;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+
+import azkaban.utils.JSONUtils;
+
+public class Trigger {
+	
+	private static Logger logger = Logger.getLogger(Trigger.class);
+	
+	private int triggerId = -1;
+	private long lastModifyTime;
+	private long submitTime;
+	private String submitUser;
+	private String source;
+	private TriggerStatus status = TriggerStatus.READY;
+	
+	private Condition triggerCondition;
+	private Condition expireCondition;
+	private List<TriggerAction> actions;
+	private List<TriggerAction> expireActions;
+	
+	private Map<String, Object> info = new HashMap<String, Object>();
+	private Map<String, Object> context = new HashMap<String, Object>();
+	
+	private static ActionTypeLoader actionTypeLoader;
+	
+	private boolean resetOnTrigger = true;
+	private boolean resetOnExpire = true;
+	
+	private long nextCheckTime = -1;
+	
+	@SuppressWarnings("unused")
+	private Trigger() throws TriggerManagerException {	
+		throw new TriggerManagerException("Triggers should always be specified");
+	}
+	
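+	/**
+	 * The next check time is the earlier of the trigger condition's and the expire
+	 * condition's next check times; the scanner thread uses it to skip triggers
+	 * that are not yet due.
+	 */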
+	public void updateNextCheckTime() {
+		this.nextCheckTime = Math.min(triggerCondition.getNextCheckTime(), expireCondition.getNextCheckTime());
+	}
+	
+	public long getNextCheckTime() {
+		return nextCheckTime;
+	}
+
+	public void setNextCheckTime(long nct) {
+		this.nextCheckTime = nct;
+	}
+	
+	public long getSubmitTime() {
+		return submitTime;
+	}
+
+	public String getSubmitUser() {
+		return submitUser;
+	}
+
+	public TriggerStatus getStatus() {
+		return status;
+	}
+
+	public void setStatus(TriggerStatus status) {
+		this.status = status;
+	}
+
+	public Condition getTriggerCondition() {
+		return triggerCondition;
+	}
+
+	public Condition getExpireCondition() {
+		return expireCondition;
+	}
+
+	public List<TriggerAction> getActions() {
+		return actions;
+	}
+
+	public List<TriggerAction> getExpireActions() {
+		return expireActions;
+	}
+	
+	public Map<String, Object> getInfo() {
+		return info;
+	}
+
+	public void setInfo(Map<String, Object> info) {
+		this.info = info;
+	}
+	
+	public Map<String, Object> getContext() {
+		return context;
+	}
+
+	public void setContext(Map<String, Object> context) {
+		this.context = context;
+	}
+	
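+	// Illustrative sketch only (the condition/action implementations are defined
+	// elsewhere and their exact constructors are assumed here): a typical scheduler
+	// trigger pairs a time-based trigger condition with an ExecuteFlowAction, e.g.
+	//   new Trigger(user, ScheduleManager.triggerSource, timeCondition,
+	//               endTimeCondition, Arrays.asList(executeFlowAction));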
+	public Trigger(
+			long lastModifyTime, 
+			long submitTime, 
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions, 
+			List<TriggerAction> expireActions,
+			Map<String, Object> info,
+			Map<String, Object> context) {
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = expireActions;
+		this.info = info;
+		this.context = context;
+	}
+	
+	public Trigger(
+			long lastModifyTime, 
+			long submitTime, 
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions, 
+			List<TriggerAction> expireActions) {
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = expireActions;
+	}
+	
+	public Trigger(
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions, 
+			List<TriggerAction> expireActions) {
+		this.lastModifyTime = DateTime.now().getMillis();
+		this.submitTime = DateTime.now().getMillis();
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = expireActions;
+	}
+	
+	public Trigger(
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions) {
+		this.lastModifyTime = DateTime.now().getMillis();
+		this.submitTime = DateTime.now().getMillis();
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = new ArrayList<TriggerAction>();
+	}
+	
+	public Trigger(
+			long lastModifyTime, 
+			long submitTime, 
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions) {
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = new ArrayList<TriggerAction>();
+	}
+	
+	public Trigger(
+			int triggerId,
+			long lastModifyTime, 
+			long submitTime,
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions,
+			List<TriggerAction> expireActions,
+			Map<String, Object> info,
+			Map<String, Object> context) {
+		this.triggerId = triggerId;
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = expireActions;
+		this.info = info;
+		this.context = context;
+	}
+	
+	public Trigger(
+			int triggerId,
+			long lastModifyTime, 
+			long submitTime, 
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions,
+			List<TriggerAction> expireActions) {
+		this.triggerId = triggerId;
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = expireActions;
+	}
+	
+	public Trigger(
+			int triggerId,
+			long lastModifyTime, 
+			long submitTime,
+			String submitUser, 
+			String source,
+			Condition triggerCondition,
+			Condition expireCondition,
+			List<TriggerAction> actions) {
+		this.triggerId = triggerId;
+		this.lastModifyTime = lastModifyTime;
+		this.submitTime = submitTime;
+		this.submitUser = submitUser;
+		this.source = source;
+		this.triggerCondition = triggerCondition;
+		this.expireCondition = expireCondition;
+		this.actions = actions;
+		this.expireActions = new ArrayList<TriggerAction>();
+	}
+	
+	public static synchronized void setActionTypeLoader(ActionTypeLoader loader) {
+		Trigger.actionTypeLoader = loader;
+	}
+	
+	public static ActionTypeLoader getActionTypeLoader() {
+		return actionTypeLoader;
+	}
+	
+	public boolean isResetOnTrigger() {
+		return resetOnTrigger;
+	}
+	
+	public void setResetOnTrigger(boolean resetOnTrigger) {
+		this.resetOnTrigger = resetOnTrigger;
+	}
+	
+	public boolean isResetOnExpire() {
+		return resetOnExpire;
+	}
+	
+	public void setResetOnExpire(boolean resetOnExpire) {
+		this.resetOnExpire = resetOnExpire;
+	}
+
+	public long getLastModifyTime() {
+		return lastModifyTime;
+	}
+	
+	public void setLastModifyTime(long lastModifyTime) {
+		this.lastModifyTime = lastModifyTime;
+	}
+
+	public void setTriggerId(int id) {
+		this.triggerId = id;
+	}
+	
+	public int getTriggerId() {
+		return triggerId;
+	}
+
+	public boolean triggerConditionMet(){
+		return triggerCondition.isMet();
+	}
+	
+	public boolean expireConditionMet(){
+		return expireCondition.isMet();
+	}
+	
+	public void resetTriggerConditions() {
+		triggerCondition.resetCheckers();
+		updateNextCheckTime();
+	}
+	
+	public void resetExpireCondition() {
+		expireCondition.resetCheckers();
+		updateNextCheckTime();
+	}
+	
+	public List<TriggerAction> getTriggerActions() {
+		return actions;
+	}
+	
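+	/**
+	 * Serializes the trigger for storage: conditions via Condition.toJson(), each
+	 * action as a {type, actionJson} pair, plus status and metadata. fromJson()
+	 * below reverses this, resolving action types through the registered
+	 * ActionTypeLoader.
+	 */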
+	public Map<String, Object> toJson() {
+		Map<String, Object> jsonObj = new HashMap<String, Object>();
+		jsonObj.put("triggerCondition", triggerCondition.toJson());
+		jsonObj.put("expireCondition", expireCondition.toJson());
+		List<Object> actionsJson = new ArrayList<Object>();
+		for(TriggerAction action : actions) {
+			Map<String, Object> oneActionJson = new HashMap<String, Object>();
+			oneActionJson.put("type", action.getType());
+			oneActionJson.put("actionJson", action.toJson());
+			actionsJson.add(oneActionJson);
+		}
+		jsonObj.put("actions", actionsJson);
+		List<Object> expireActionsJson = new ArrayList<Object>();
+		for(TriggerAction expireAction : expireActions) {
+			Map<String, Object> oneExpireActionJson = new HashMap<String, Object>();
+			oneExpireActionJson.put("type", expireAction.getType());
+			oneExpireActionJson.put("actionJson", expireAction.toJson());
+			expireActionsJson.add(oneExpireActionJson);
+		}
+		jsonObj.put("expireActions", expireActionsJson);
+		
+		jsonObj.put("resetOnTrigger", String.valueOf(resetOnTrigger));
+		jsonObj.put("resetOnExpire", String.valueOf(resetOnExpire));
+		jsonObj.put("submitUser", submitUser);
+		jsonObj.put("source", source);
+		jsonObj.put("submitTime", String.valueOf(submitTime));
+		jsonObj.put("lastModifyTime", String.valueOf(lastModifyTime));
+		jsonObj.put("triggerId", String.valueOf(triggerId));
+		jsonObj.put("status", status.toString());
+		jsonObj.put("info", info);
+		jsonObj.put("context", context);
+		return jsonObj;
+	}
+	
+	
+	public String getSource() {
+		return source;
+	}
+
+	@SuppressWarnings("unchecked")
+	public static Trigger fromJson(Object obj) throws Exception {
+		
+		if(actionTypeLoader == null) {
+			throw new Exception("Trigger Action Type loader not initialized.");
+		}
+ 		
+		Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+		
+		Trigger trigger = null;
+		try{
+			logger.info("Decoding for " + JSONUtils.toJSON(obj));
+			Condition triggerCond = Condition.fromJson(jsonObj.get("triggerCondition"));
+			Condition expireCond = Condition.fromJson(jsonObj.get("expireCondition"));
+			List<TriggerAction> actions = new ArrayList<TriggerAction>();
+			List<Object> actionsJson = (List<Object>) jsonObj.get("actions");
+			for(Object actObj : actionsJson) {
+				Map<String, Object> oneActionJson = (HashMap<String, Object>) actObj;
+				String type = (String) oneActionJson.get("type");
+				TriggerAction act = actionTypeLoader.createActionFromJson(type, oneActionJson.get("actionJson"));
+				actions.add(act);
+			}
+			List<TriggerAction> expireActions = new ArrayList<TriggerAction>();
+			List<Object> expireActionsJson = (List<Object>) jsonObj.get("expireActions");
+			for(Object expireActObj : expireActionsJson) {
+				Map<String, Object> oneExpireActionJson = (HashMap<String, Object>) expireActObj;
+				String type = (String) oneExpireActionJson.get("type");
+				TriggerAction expireAct = actionTypeLoader.createActionFromJson(type, oneExpireActionJson.get("actionJson"));
+				expireActions.add(expireAct);
+			}
+			boolean resetOnTrigger = Boolean.valueOf((String) jsonObj.get("resetOnTrigger"));
+			boolean resetOnExpire = Boolean.valueOf((String) jsonObj.get("resetOnExpire"));
+			String submitUser = (String) jsonObj.get("submitUser");
+			String source = (String) jsonObj.get("source");
+			long submitTime = Long.valueOf((String) jsonObj.get("submitTime"));
+			long lastModifyTime = Long.valueOf((String) jsonObj.get("lastModifyTime"));
+			int triggerId = Integer.valueOf((String) jsonObj.get("triggerId"));
+			TriggerStatus status = TriggerStatus.valueOf((String)jsonObj.get("status"));
+			Map<String, Object> info = (Map<String, Object>) jsonObj.get("info");
+			Map<String, Object> context = (Map<String, Object>) jsonObj.get("context");
+			if(context == null) {
+				context = new HashMap<String, Object>();
+			}
+			for(ConditionChecker checker : triggerCond.getCheckers().values()) {
+				checker.setContext(context);
+			}
+			for(ConditionChecker checker : expireCond.getCheckers().values()) {
+				checker.setContext(context);
+			}
+			for(TriggerAction action : actions) {
+				action.setContext(context);
+			}
+			for(TriggerAction action : expireActions) {
+				action.setContext(context);
+			}
+			
+			trigger = new Trigger(triggerId, lastModifyTime, submitTime, submitUser, source, triggerCond, expireCond, actions, expireActions, info, context);
+			trigger.setResetOnExpire(resetOnExpire);
+			trigger.setResetOnTrigger(resetOnTrigger);
+			trigger.setStatus(status);
+		} catch (Exception e) {
+			logger.error("Failed to decode the trigger.", e);
+			throw new Exception("Failed to decode the trigger.", e);
+		}
+		
+		return trigger;
+	}
+
+	public String getDescription() {
+		StringBuffer actionsString = new StringBuffer();
+		for(TriggerAction act : actions) {
+			actionsString.append(", ");
+			actionsString.append(act.getDescription());
+		}
+		return "Trigger from " + getSource() +
+				" with trigger condition of " + triggerCondition.getExpression() +
+				" and expire condition of " + expireCondition.getExpression() + 
+				actionsString;
+	}
+
+	public void stopCheckers() {
+		for(ConditionChecker checker : triggerCondition.getCheckers().values()) {
+			checker.stopChecker();
+		}
+		for(ConditionChecker checker : expireCondition.getCheckers().values()) {
+			checker.stopChecker();
+		}
+		
+	}
+
+	
+}
diff --git a/src/java/azkaban/trigger/TriggerAction.java b/src/java/azkaban/trigger/TriggerAction.java
new file mode 100644
index 0000000..b186b7b
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerAction.java
@@ -0,0 +1,21 @@
+package azkaban.trigger;
+
+import java.util.Map;
+
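+/**
+ * A pluggable action fired when a trigger's condition is met. Implementations are
+ * registered with the ActionTypeLoader under their getType() name and must be
+ * reconstructible from their toJson() form, since triggers are persisted as JSON.
+ */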
+public interface TriggerAction {
+	
+	String getId();
+	
+	String getType();
+	
+	TriggerAction fromJson(Object obj) throws Exception;
+	
+	Object toJson();
+	
+	void doAction() throws Exception;
+	
+	void setContext(Map<String, Object> context);
+
+	String getDescription();
+	
+}
diff --git a/src/java/azkaban/trigger/TriggerAgent.java b/src/java/azkaban/trigger/TriggerAgent.java
new file mode 100644
index 0000000..cdabe94
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerAgent.java
@@ -0,0 +1,14 @@
+package azkaban.trigger;
+
+import azkaban.utils.Props;
+
+public interface TriggerAgent {
+	public void loadTriggerFromProps(Props props) throws Exception;
+
+	public String getTriggerSource();
+	
+	public void start() throws Exception;
+	
+	public void shutdown();
+
+}
diff --git a/src/java/azkaban/trigger/TriggerException.java b/src/java/azkaban/trigger/TriggerException.java
new file mode 100644
index 0000000..e49fb2a
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerException.java
@@ -0,0 +1,19 @@
+package azkaban.trigger;
+
+
+public class TriggerException extends Exception{
+	private static final long serialVersionUID = 1L;
+
+	public TriggerException(String message) {
+		super(message);
+	}
+	
+	public TriggerException(String message, Throwable cause) {
+		super(message, cause);
+	}
+
+	public TriggerException(Throwable e) {
+		super(e);
+	}
+}
+
diff --git a/src/java/azkaban/trigger/TriggerLoader.java b/src/java/azkaban/trigger/TriggerLoader.java
new file mode 100644
index 0000000..bddb9cc
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerLoader.java
@@ -0,0 +1,19 @@
+package azkaban.trigger;
+
+import java.util.List;
+
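+/**
+ * Storage abstraction for triggers; JdbcTriggerLoader is the database-backed
+ * implementation used by the web server.
+ */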
+public interface TriggerLoader {
+
+	public void addTrigger(Trigger t) throws TriggerLoaderException;	
+
+	public void removeTrigger(Trigger s) throws TriggerLoaderException;
+	
+	public void updateTrigger(Trigger t) throws TriggerLoaderException;
+	
+	public List<Trigger> loadTriggers() throws TriggerLoaderException;
+
+	public Trigger loadTrigger(int triggerId) throws TriggerLoaderException;
+
+	public List<Trigger> getUpdatedTriggers(long lastUpdateTime) throws TriggerLoaderException;
+	
+}
diff --git a/src/java/azkaban/trigger/TriggerManager.java b/src/java/azkaban/trigger/TriggerManager.java
new file mode 100644
index 0000000..d4f1698
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerManager.java
@@ -0,0 +1,461 @@
+package azkaban.trigger;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.PriorityBlockingQueue;
+import org.apache.log4j.Logger;
+
+import azkaban.utils.Props;
+
+public class TriggerManager implements TriggerManagerAdapter{
+	private static Logger logger = Logger.getLogger(TriggerManager.class);
+	public static final long DEFAULT_SCANNER_INTERVAL_MS = 60000;
+
+	private static Map<Integer, Trigger> triggerIdMap = new ConcurrentHashMap<Integer, Trigger>();
+	
+	private CheckerTypeLoader checkerTypeLoader;
+	private ActionTypeLoader actionTypeLoader;
+	private TriggerLoader triggerLoader;
+
+	private final TriggerScannerThread runnerThread;
+	private long lastRunnerThreadCheckTime = -1;
+	private long runnerThreadIdleTime = -1;
+	private LocalTriggerJMX jmxStats = new LocalTriggerJMX();
+	
+	private String scannerStage = "";
+	
+	public TriggerManager(Props props, TriggerLoader triggerLoader) throws TriggerManagerException {
+
+		this.triggerLoader = triggerLoader;
+		
+		long scannerInterval = props.getLong("trigger.scan.interval", DEFAULT_SCANNER_INTERVAL_MS);
+		runnerThread = new TriggerScannerThread(scannerInterval);
+
+		checkerTypeLoader = new CheckerTypeLoader();
+		actionTypeLoader = new ActionTypeLoader();
+
+		try {
+			checkerTypeLoader.init(props);
+			actionTypeLoader.init(props);
+		} catch (Exception e) {
+			throw new TriggerManagerException(e);
+		}
+		
+		Condition.setCheckerLoader(checkerTypeLoader);
+		Trigger.setActionTypeLoader(actionTypeLoader);
+		
+		logger.info("TriggerManager loaded.");
+	}
+
+	@Override
+	public void start() throws TriggerManagerException{
+		
+		try{
+			// expect loader to return valid triggers
+			List<Trigger> triggers = triggerLoader.loadTriggers();
+			for(Trigger t : triggers) {
+				runnerThread.addTrigger(t);
+				triggerIdMap.put(t.getTriggerId(), t);
+			}
+		} catch (Exception e) {
+			throw new TriggerManagerException(e);
+		}
+		
+		runnerThread.start();
+	}
+	
+	protected CheckerTypeLoader getCheckerLoader() {
+		return checkerTypeLoader;
+	}
+
+	protected ActionTypeLoader getActionLoader() {
+		return actionTypeLoader;
+	}
+
+	public synchronized void insertTrigger(Trigger t) throws TriggerManagerException {
+		try {
+			triggerLoader.addTrigger(t);
+		} catch (TriggerLoaderException e) {
+			throw new TriggerManagerException(e);
+		}
+		runnerThread.addTrigger(t);
+		triggerIdMap.put(t.getTriggerId(), t);
+	}
+	
+	public synchronized void removeTrigger(int id) throws TriggerManagerException {
+		Trigger t = triggerIdMap.get(id);
+		if(t != null) {
+			removeTrigger(t);
+		}
+	}
+	
+	public synchronized void updateTrigger(int id) throws TriggerManagerException {
+		if(! triggerIdMap.containsKey(id)) {
+			throw new TriggerManagerException("The trigger to update " + id + " doesn't exist!");
+		}
+		
+		Trigger t;
+		try {
+			t = triggerLoader.loadTrigger(id);
+		} catch (TriggerLoaderException e) {
+			throw new TriggerManagerException(e);
+		}
+		updateTrigger(t);
+	}
+	
+	public synchronized void updateTrigger(Trigger t) throws TriggerManagerException {
+		runnerThread.deleteTrigger(triggerIdMap.get(t.getTriggerId()));
+		runnerThread.addTrigger(t);
+		triggerIdMap.put(t.getTriggerId(), t);
+	}
+
+	public synchronized void removeTrigger(Trigger t) throws TriggerManagerException {
+		runnerThread.deleteTrigger(t);
+		triggerIdMap.remove(t.getTriggerId());
+		try {
+			t.stopCheckers();
+			triggerLoader.removeTrigger(t);
+		} catch (TriggerLoaderException e) {
+			throw new TriggerManagerException(e);
+		}
+	}
+	
+	public List<Trigger> getTriggers() {
+		return new ArrayList<Trigger>(triggerIdMap.values());
+	}
+	
+	public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
+		return checkerTypeLoader.getSupportedCheckers();
+	}
+	
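+	/**
+	 * Single scanner thread: wakes up every trigger.scan.interval milliseconds
+	 * (60 seconds by default), checks READY triggers whose next check time has
+	 * passed, fires trigger or expire actions, and persists the updated trigger
+	 * state through the loader.
+	 */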
+	private class TriggerScannerThread extends Thread {
+		private BlockingQueue<Trigger> triggers;
+		private boolean shutdown = false;
+		//private AtomicBoolean stillAlive = new AtomicBoolean(true);
+		private final long scannerInterval;
+		
+		public TriggerScannerThread(long scannerInterval) {
+			triggers = new PriorityBlockingQueue<Trigger>(1, new TriggerComparator());
+			this.setName("TriggerRunnerManager-Trigger-Scanner-Thread");
+			this.scannerInterval = scannerInterval;
+		}
+
+		public void shutdown() {
+			logger.info("Shutting down trigger manager thread " + this.getName());
+			shutdown = true;
+			//stillAlive.set(false);
+			this.interrupt();
+		}
+		
+		public synchronized void addTrigger(Trigger t) {
+			t.updateNextCheckTime();
+			triggers.add(t);
+		}
+		
+		public synchronized void deleteTrigger(Trigger t) {
+			triggers.remove(t);
+		}
+
+		public void run() {
+			//while(stillAlive.get()) {
+			while(!shutdown) {
+				synchronized (this) {
+					try{
+						lastRunnerThreadCheckTime = System.currentTimeMillis();
+						
+						scannerStage = "Ready to start a new scan cycle at " + lastRunnerThreadCheckTime;
+						
+						try {
+							checkAllTriggers();
+						} catch (Exception e) {
+							logger.error("Error checking triggers.", e);
+						} catch (Throwable t) {
+							logger.error("Error checking triggers.", t);
+						}
+						
+						scannerStage = "Done checking all triggers.";
+						
+						runnerThreadIdleTime = scannerInterval - (System.currentTimeMillis() - lastRunnerThreadCheckTime);
+
+						if(runnerThreadIdleTime < 0) {
+							logger.error("Trigger manager thread " + this.getName() + " is too busy!");
+						} else {
+							wait(runnerThreadIdleTime);
+						}
+					} catch(InterruptedException e) {
+						logger.info("Interrupted. Probably to shut down.");
+					}
+				}
+			}
+		}
+		
+		private void checkAllTriggers() throws TriggerManagerException {
+			long now = System.currentTimeMillis();
+			for(Trigger t : triggers) {
+				scannerStage = "Checking for trigger " + t.getTriggerId();
+				if(t.getNextCheckTime() > now) {
+					logger.info("Skipping trigger " + t.getTriggerId() + " until " + t.getNextCheckTime());
+					continue;
+				}
+				logger.info("Checking trigger " + t.getTriggerId());
+				if(t.getStatus().equals(TriggerStatus.READY)) {
+					if(t.triggerConditionMet()) {
+						onTriggerTrigger(t);
+					} else if (t.expireConditionMet()) {
+						onTriggerExpire(t);
+					}
+				}
+				if(t.getStatus().equals(TriggerStatus.EXPIRED) && t.getSource().equals("azkaban")) {
+					removeTrigger(t);
+				} else {
+					t.updateNextCheckTime();
+				}
+			}
+		}
+		
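+		/**
+		 * Runs every trigger action; failures are logged so one bad action does not
+		 * block the rest. Depending on resetOnTrigger, the conditions are reset for
+		 * the next cycle or the trigger is marked EXPIRED, and the new state is
+		 * written back via the loader.
+		 */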
+		private void onTriggerTrigger(Trigger t) throws TriggerManagerException {
+			List<TriggerAction> actions = t.getTriggerActions();
+			for(TriggerAction action : actions) {
+				try {
+					logger.info("Doing trigger actions");
+					action.doAction();
+				} catch (Exception e) {
+					// Log and continue so one failing action does not block the rest.
+					logger.error("Failed to do action " + action.getDescription(), e);
+				} catch (Throwable th) {
+					logger.error("Failed to do action " + action.getDescription(), th);
+				}
+			}
+			if(t.isResetOnTrigger()) {
+				t.resetTriggerConditions();
+				t.resetExpireCondition();
+			} else {
+				t.setStatus(TriggerStatus.EXPIRED);
+			}
+			try {
+				triggerLoader.updateTrigger(t);
+			}
+			catch (TriggerLoaderException e) {
+				throw new TriggerManagerException(e);
+			}
+		}
+		
+		private void onTriggerExpire(Trigger t) throws TriggerManagerException {
+			List<TriggerAction> expireActions = t.getExpireActions();
+			for(TriggerAction action : expireActions) {
+				try {
+					logger.info("Doing expire actions");
+					action.doAction();
+				} catch (Exception e) {
+					// Log and continue so one failing expire action does not block the rest.
+					logger.error("Failed to do expire action " + action.getDescription(), e);
+				} catch (Throwable th) {
+					logger.error("Failed to do expire action " + action.getDescription(), th);
+				}
+			}
+			if(t.isResetOnExpire()) {
+				t.resetTriggerConditions();
+				t.resetExpireCondition();
+//				updateTrigger(t);
+			} else {
+				t.setStatus(TriggerStatus.EXPIRED);
+			}
+			try {
+				triggerLoader.updateTrigger(t);
+			} catch (TriggerLoaderException e) {
+				throw new TriggerManagerException(e);
+			}
+		}
+		
+		private class TriggerComparator implements Comparator<Trigger> {
+			@Override
+			public int compare(Trigger arg0, Trigger arg1) {
+				long first = arg0.getNextCheckTime();
+				long second = arg1.getNextCheckTime();
+				
+				if (first == second) {
+					return 0;
+				} else if (first < second) {
+					return -1;
+				}
+				return 1;
+			}
+		}
+	}
+	
+	public synchronized Trigger getTrigger(int triggerId) {
+		return triggerIdMap.get(triggerId);
+	}
+
+	public void expireTrigger(int triggerId) {
+		Trigger t = getTrigger(triggerId);
+		t.setStatus(TriggerStatus.EXPIRED);
+//		updateAgent(t);
+	}
+
+	@Override
+	public List<Trigger> getTriggers(String triggerSource) {
+		List<Trigger> triggers = new ArrayList<Trigger>();
+		for(Trigger t : triggerIdMap.values()) {
+			if(t.getSource().equals(triggerSource)) {
+				triggers.add(t);
+			}
+		}
+		return triggers;
+	}
+
+	@Override
+	public List<Trigger> getTriggerUpdates(String triggerSource, long lastUpdateTime) throws TriggerManagerException{
+		List<Trigger> triggers = new ArrayList<Trigger>();
+		for(Trigger t : triggerIdMap.values()) {
+			if(t.getSource().equals(triggerSource) && t.getLastModifyTime() > lastUpdateTime) {
+				triggers.add(t);
+			}
+		}
+		return triggers;
+	}
+	
+	@Override
+	public List<Trigger> getAllTriggerUpdates(long lastUpdateTime) throws TriggerManagerException {
+		List<Trigger> triggers = new ArrayList<Trigger>();
+		for(Trigger t : triggerIdMap.values()) {
+			if(t.getLastModifyTime() > lastUpdateTime) {
+				triggers.add(t);
+			}
+		}
+		return triggers;
+	}
+
+//	public void loadTrigger(int triggerId) throws TriggerManagerException {
+//		Trigger t;
+//		try {
+//			t = triggerLoader.loadTrigger(triggerId);
+//		} catch (TriggerLoaderException e) {
+//			throw new TriggerManagerException(e);
+//		}
+//		if(t.getStatus().equals(TriggerStatus.PREPARING)) {
+//			triggerIdMap.put(t.getTriggerId(), t);
+//			runnerThread.addTrigger(t);
+//			t.setStatus(TriggerStatus.READY);
+//		}
+//	}
+
+	@Override
+	public void insertTrigger(Trigger t, String user) throws TriggerManagerException {
+		insertTrigger(t);
+	}
+
+	@Override
+	public void removeTrigger(int id, String user) throws TriggerManagerException {
+		removeTrigger(id);
+	}
+
+	@Override
+	public void updateTrigger(Trigger t, String user) throws TriggerManagerException {
+		updateTrigger(t);
+	}
+	
+//	@Override
+//	public void insertTrigger(int triggerId, String user) throws TriggerManagerException {
+//		Trigger t;
+//		try {
+//			t = triggerLoader.loadTrigger(triggerId);
+//		} catch (TriggerLoaderException e) {
+//			throw new TriggerManagerException(e);
+//		}
+//		if(t != null) {
+//			insertTrigger(t);
+//		}
+//	}
+	
+	@Override
+	public void shutdown() {
+		runnerThread.shutdown();
+	}
+
+	@Override
+	public TriggerJMX getJMX() {
+		return this.jmxStats;
+	}
+	
+	private class LocalTriggerJMX implements TriggerJMX {
+
+		@Override
+		public long getLastRunnerThreadCheckTime() {
+			return lastRunnerThreadCheckTime;
+		}
+
+		@Override
+		public boolean isRunnerThreadActive() {
+			return runnerThread.isAlive();
+		}
+
+		@Override
+		public String getPrimaryServerHost() {
+			return "local";
+		}
+
+		@Override
+		public int getNumTriggers() {
+			return triggerIdMap.size();
+		}
+
+		@Override
+		public String getTriggerSources() {
+			Set<String> sources = new HashSet<String>();
+			for(Trigger t : triggerIdMap.values()) {
+				sources.add(t.getSource());
+			}
+			return sources.toString();
+		}
+
+		@Override
+		public String getTriggerIds() {
+			return triggerIdMap.keySet().toString();
+		}
+
+		@Override
+		public long getScannerIdleTime() {
+			return runnerThreadIdleTime;
+		}
+
+		@Override
+		public Map<String, Object> getAllJMXMbeans() {
+			return new HashMap<String, Object>();
+		}
+
+		@Override
+		public String getScannerThreadStage() {
+			return scannerStage;
+		}
+		
+	}
+
+	@Override
+	public void registerCheckerType(String name, Class<? extends ConditionChecker> checker) {
+		checkerTypeLoader.registerCheckerType(name, checker);
+	}
+
+	@Override
+	public void registerActionType(String name, Class<? extends TriggerAction> action) {
+		actionTypeLoader.registerActionType(name, action);
+	}
+	
+	
+}
diff --git a/src/java/azkaban/trigger/TriggerManagerAdapter.java b/src/java/azkaban/trigger/TriggerManagerAdapter.java
new file mode 100644
index 0000000..cdf2921
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerManagerAdapter.java
@@ -0,0 +1,43 @@
+package azkaban.trigger;
+
+import java.util.List;
+import java.util.Map;
+
+
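+/**
+ * Interface exposed by the trigger manager: CRUD on triggers (with the acting user
+ * supplied), update polling by source or globally, checker/action type
+ * registration, and a JMX view of the scanner thread.
+ */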
+public interface TriggerManagerAdapter {
+	
+	public void insertTrigger(Trigger t, String user) throws TriggerManagerException;
+	
+	public void removeTrigger(int id, String user) throws TriggerManagerException;
+	
+	public void updateTrigger(Trigger t, String user) throws TriggerManagerException;
+
+	public List<Trigger> getAllTriggerUpdates(long lastUpdateTime) throws TriggerManagerException;
+	
+	public List<Trigger> getTriggerUpdates(String triggerSource, long lastUpdateTime) throws TriggerManagerException;
+	
+	public List<Trigger> getTriggers(String triggerSource);
+
+	public void start() throws TriggerManagerException;
+	
+	public void shutdown();
+
+	public void registerCheckerType(String name, Class<? extends ConditionChecker> checker);
+	
+	public void registerActionType(String name, Class<? extends TriggerAction> action);
+	
+	public TriggerJMX getJMX();
+	
+	public interface TriggerJMX {
+		public long getLastRunnerThreadCheckTime();
+		public boolean isRunnerThreadActive();
+		public String getPrimaryServerHost();
+		public int getNumTriggers();
+		public String getTriggerSources();
+		public String getTriggerIds();
+		public long getScannerIdleTime();
+		public Map<String, Object> getAllJMXMbeans();
+		public String getScannerThreadStage();
+	}
+	
+}
diff --git a/src/java/azkaban/trigger/TriggerStatus.java b/src/java/azkaban/trigger/TriggerStatus.java
new file mode 100644
index 0000000..3fcadf7
--- /dev/null
+++ b/src/java/azkaban/trigger/TriggerStatus.java
@@ -0,0 +1,29 @@
+package azkaban.trigger;
+
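+/**
+ * Trigger lifecycle states, persisted as numeric codes; unrecognized codes fall
+ * back to READY in fromInteger().
+ */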
+public enum TriggerStatus {
+	READY(10), PAUSED(20), EXPIRED(30);
+	
+	private int numVal;
+
+	TriggerStatus(int numVal) {
+		this.numVal = numVal;
+	}
+
+	public int getNumVal() {
+		return numVal;
+	}
+	
+	public static TriggerStatus fromInteger(int x) {
+		switch (x) {
+		case 10:
+			return READY;
+		case 20:
+			return PAUSED;
+		case 30:
+			return EXPIRED;
+		default:
+			return READY;
+		}
+	}
+
+}
diff --git a/src/java/azkaban/user/Permission.java b/src/java/azkaban/user/Permission.java
index 0405c6e..42158f8 100644
--- a/src/java/azkaban/user/Permission.java
+++ b/src/java/azkaban/user/Permission.java
@@ -29,6 +29,7 @@ public class Permission {
 		WRITE(0x0000002),
 		EXECUTE(0x0000004),
 		SCHEDULE(0x0000008),
+		METRICS(0x0000010),
 		CREATEPROJECTS(0x40000000), // Only used for roles
 		ADMIN(0x8000000);
 		
diff --git a/src/java/azkaban/utils/AbstractMailer.java b/src/java/azkaban/utils/AbstractMailer.java
index 998dc50..8d91456 100644
--- a/src/java/azkaban/utils/AbstractMailer.java
+++ b/src/java/azkaban/utils/AbstractMailer.java
@@ -64,6 +64,10 @@ public class AbstractMailer {
 		return message;
 	}
 	
+	public EmailMessage prepareEmailMessage(String subject, String mimetype, Collection<String> emailList) {
+		return createEmailMessage(subject, mimetype, emailList);
+	}
+	
 	public String getAzkabanName() {
 		return azkabanName;
 	}
diff --git a/src/java/azkaban/utils/StringUtils.java b/src/java/azkaban/utils/StringUtils.java
index 02bad64..77c5941 100644
--- a/src/java/azkaban/utils/StringUtils.java
+++ b/src/java/azkaban/utils/StringUtils.java
@@ -17,6 +17,7 @@
 package azkaban.utils;
 
 import java.util.Collection;
+import java.util.List;
 
 public class StringUtils {
 	public static final char SINGLE_QUOTE = '\'';
@@ -38,6 +39,17 @@ public class StringUtils {
 		return buf.toString();
 	}
 	
+	@Deprecated
+	public static String join(List<String> list, String delimiter) {
+		StringBuffer buffer = new StringBuffer();
+		for (String str: list) {
+			buffer.append(str);
+			buffer.append(delimiter);
+		}
+		
+		return buffer.toString();
+	}
+	
 	/**
 	 * Use this when you don't want to include Apache Common's string for
 	 * plugins.
diff --git a/src/java/azkaban/utils/Utils.java b/src/java/azkaban/utils/Utils.java
index 8e13ffe..17a3edf 100644
--- a/src/java/azkaban/utils/Utils.java
+++ b/src/java/azkaban/utils/Utils.java
@@ -35,6 +35,14 @@ import java.util.zip.ZipFile;
 import java.util.zip.ZipOutputStream;
 
 import org.apache.commons.io.IOUtils;
+import org.joda.time.Days;
+import org.joda.time.DurationFieldType;
+import org.joda.time.Hours;
+import org.joda.time.Minutes;
+import org.joda.time.Months;
+import org.joda.time.ReadablePeriod;
+import org.joda.time.Seconds;
+import org.joda.time.Weeks;
 
 /**
  * A util helper class full of static methods that are commonly used.
@@ -82,6 +90,17 @@ public class Utils {
 			return t;
 		}
 	}
+	
+	public static File findFilefromDir(File dir, String fn){
+		if(dir.isDirectory()) {
+			for(File f : dir.listFiles()) {
+				if(f.getName().equals(fn)) {
+					return f;
+				}
+			}
+		}
+		return null;
+	}
 
 	/**
 	 * Print the message and then exit with the given exit code
@@ -283,10 +302,85 @@ public class Utils {
 		
 		Class<?>[] argTypes = new Class[args.length];
 		for (int i=0; i < args.length; ++i) {
 			argTypes[i] = args[i].getClass();
 		}
 		
 		Method method = clazz.getDeclaredMethod(methodName, argTypes);
 		return method.invoke(null, args);
 	}
+	
+	public static void copyStream(InputStream input, OutputStream output) throws IOException {
+		byte[] buffer = new byte[1024];
+		int bytesRead;
+		while ((bytesRead = input.read(buffer)) != -1) {
+			output.write(buffer, 0, bytesRead);
+		}
+	}
+	
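+	/**
+	 * Parses period strings such as "1d", "2h" or "30m" into joda-time periods;
+	 * "n" means no period. createPeriodString() below is the inverse, emitting only
+	 * the most significant non-zero field.
+	 */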
+	public static ReadablePeriod parsePeriodString(String periodStr) {
+		ReadablePeriod period;
+		char periodUnit = periodStr.charAt(periodStr.length() - 1);
+		if (periodUnit == 'n') {
+			return null;
+		}
+
+		int periodInt = Integer.parseInt(periodStr.substring(0,
+				periodStr.length() - 1));
+		switch (periodUnit) {
+		case 'M':
+			period = Months.months(periodInt);
+			break;
+		case 'w':
+			period = Weeks.weeks(periodInt);
+			break;
+		case 'd':
+			period = Days.days(periodInt);
+			break;
+		case 'h':
+			period = Hours.hours(periodInt);
+			break;
+		case 'm':
+			period = Minutes.minutes(periodInt);
+			break;
+		case 's':
+			period = Seconds.seconds(periodInt);
+			break;
+		default:
+			throw new IllegalArgumentException("Invalid schedule period unit '"
+					+ periodUnit + "'");
+		}
+
+		return period;
+	}
+
+	public static String createPeriodString(ReadablePeriod period) {
+		String periodStr = "n";
+
+		if (period == null) {
+			return "n";
+		}
+
+		if (period.get(DurationFieldType.months()) > 0) {
+			int months = period.get(DurationFieldType.months());
+			periodStr = months + "M";
+		} else if (period.get(DurationFieldType.weeks()) > 0) {
+			int weeks = period.get(DurationFieldType.weeks());
+			periodStr = weeks + "w";
+		} else if (period.get(DurationFieldType.days()) > 0) {
+			int days = period.get(DurationFieldType.days());
+			periodStr = days + "d";
+		} else if (period.get(DurationFieldType.hours()) > 0) {
+			int hours = period.get(DurationFieldType.hours());
+			periodStr = hours + "h";
+		} else if (period.get(DurationFieldType.minutes()) > 0) {
+			int minutes = period.get(DurationFieldType.minutes());
+			periodStr = minutes + "m";
+		} else if (period.get(DurationFieldType.seconds()) > 0) {
+			int seconds = period.get(DurationFieldType.seconds());
+			periodStr = seconds + "s";
+		}
+
+		return periodStr;
+	}
 }
diff --git a/src/java/azkaban/webapp/AzkabanServer.java b/src/java/azkaban/webapp/AzkabanServer.java
index d11f810..edb7d19 100644
--- a/src/java/azkaban/webapp/AzkabanServer.java
+++ b/src/java/azkaban/webapp/AzkabanServer.java
@@ -135,4 +135,5 @@ public abstract class AzkabanServer {
 	public abstract VelocityEngine getVelocityEngine();
 	
 	public abstract UserManager getUserManager();
+	
 }
diff --git a/src/java/azkaban/webapp/AzkabanWebServer.java b/src/java/azkaban/webapp/AzkabanWebServer.java
index 02a81aa..e066be9 100644
--- a/src/java/azkaban/webapp/AzkabanWebServer.java
+++ b/src/java/azkaban/webapp/AzkabanWebServer.java
@@ -27,7 +27,9 @@ import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.TimeZone;
 
 import javax.management.MBeanInfo;
@@ -49,23 +51,34 @@ import org.mortbay.jetty.servlet.DefaultServlet;
 import org.mortbay.jetty.servlet.ServletHolder;
 import org.mortbay.thread.QueuedThreadPool;
 
+import azkaban.alert.Alerter;
 import azkaban.database.AzkabanDatabaseSetup;
 import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.JdbcExecutorLoader;
 import azkaban.jmx.JmxExecutorManager;
 import azkaban.jmx.JmxJettyServer;
-import azkaban.jmx.JmxSLAManager;
-import azkaban.jmx.JmxScheduler;
+import azkaban.jmx.JmxTriggerManager;
 import azkaban.project.JdbcProjectLoader;
 import azkaban.project.ProjectManager;
 
-import azkaban.scheduler.JdbcScheduleLoader;
+import azkaban.scheduler.ScheduleLoader;
 import azkaban.scheduler.ScheduleManager;
-import azkaban.sla.JdbcSLALoader;
-import azkaban.sla.SLAManager;
-import azkaban.sla.SLAManagerException;
+import azkaban.scheduler.TriggerBasedScheduleLoader;
+import azkaban.trigger.JdbcTriggerLoader;
+import azkaban.trigger.TriggerLoader;
+import azkaban.trigger.TriggerManager;
+import azkaban.trigger.TriggerManagerException;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.trigger.builtin.CreateTriggerAction;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+import azkaban.trigger.builtin.ExecutionChecker;
+import azkaban.trigger.builtin.KillExecutionAction;
+import azkaban.trigger.builtin.SlaAlertAction;
+import azkaban.trigger.builtin.SlaChecker;
 import azkaban.user.UserManager;
 import azkaban.user.XmlUserManager;
+import azkaban.utils.Emailer;
 import azkaban.utils.FileIOUtils;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
@@ -80,6 +93,8 @@ import azkaban.webapp.servlet.ScheduleServlet;
 import azkaban.webapp.servlet.HistoryServlet;
 import azkaban.webapp.servlet.ProjectServlet;
 import azkaban.webapp.servlet.ProjectManagerServlet;
+import azkaban.webapp.servlet.TriggerManagerServlet;
+import azkaban.webapp.servlet.TriggerPlugin;
 import azkaban.webapp.servlet.ViewerPlugin;
 import azkaban.webapp.session.SessionCache;
 
@@ -128,16 +143,18 @@ public class AzkabanWebServer extends AzkabanServer {
 	private final Server server;
 	private UserManager userManager;
 	private ProjectManager projectManager;
-	private ExecutorManager executorManager;
+	private ExecutorManagerAdapter executorManager;
 	private ScheduleManager scheduleManager;
-	private SLAManager slaManager;
-
+	private TriggerManager triggerManager;
+	private Map<String, Alerter> alerters;
+	
 	private final ClassLoader baseClassLoader;
 	
 	private Props props;
 	private SessionCache sessionCache;
 	private File tempDir;
 	private List<ViewerPlugin> viewerPlugins;
+	private Map<String, TriggerPlugin> triggerPlugins;
 	
 	private MBeanServer mbeanServer;
 	private ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
@@ -159,11 +176,24 @@ public class AzkabanWebServer extends AzkabanServer {
 		velocityEngine = configureVelocityEngine(props.getBoolean(VELOCITY_DEV_MODE_PARAM, false));
 		sessionCache = new SessionCache(props);
 		userManager = loadUserManager(props);
-		projectManager = loadProjectManager(props);
+		
+		alerters = loadAlerters(props);
+		
 		executorManager = loadExecutorManager(props);
-		slaManager = loadSLAManager(props);
-		scheduleManager = loadScheduleManager(executorManager, slaManager, props);
-		baseClassLoader = getBaseClassloader();
+		projectManager = loadProjectManager(props);
+		
+		triggerManager = loadTriggerManager(props);
+		loadBuiltinCheckersAndActions();		
+		
+		// load all trigger agents here
+		scheduleManager = loadScheduleManager(triggerManager, props);
+		
+		String triggerPluginDir = props.getString("trigger.plugin.dir", "plugins/triggers");
+		
+		loadPluginCheckersAndActions(triggerPluginDir);
+		
+		//baseClassLoader = getBaseClassloader();
+		baseClassLoader = getClass().getClassLoader();
 		
 		tempDir = new File(props.getString("azkaban.temp.dir", "temp"));
 
@@ -172,7 +202,6 @@ public class AzkabanWebServer extends AzkabanServer {
 			String timezone = props.getString(DEFAULT_TIMEZONE_ID);
 			TimeZone.setDefault(TimeZone.getTimeZone(timezone));
 			DateTimeZone.setDefault(DateTimeZone.forID(timezone));
-
 			logger.info("Setting timezone to " + timezone);
 		}
 		
@@ -183,13 +212,15 @@ public class AzkabanWebServer extends AzkabanServer {
 		this.viewerPlugins = viewerPlugins;
 	}
 	
+	private void setTriggerPlugins(Map<String, TriggerPlugin> triggerPlugins) {
+		this.triggerPlugins = triggerPlugins;
+	}
+	
 	private UserManager loadUserManager(Props props) {
 		Class<?> userManagerClass = props.getClass(USER_MANAGER_CLASS_PARAM, null);
 		logger.info("Loading user manager class " + userManagerClass.getName());
 		UserManager manager = null;
-
 		if (userManagerClass != null && userManagerClass.getConstructors().length > 0) {
-
 			try {
 				Constructor<?> userManagerConstructor = userManagerClass.getConstructor(Props.class);
 				manager = (UserManager) userManagerConstructor.newInstance(props);
@@ -202,34 +233,323 @@ public class AzkabanWebServer extends AzkabanServer {
 		else {
 			manager = new XmlUserManager(props);
 		}
-
 		return manager;
 	}
 	
 	private ProjectManager loadProjectManager(Props props) {
 		logger.info("Loading JDBC for project management");
-
 		JdbcProjectLoader loader = new JdbcProjectLoader(props);
 		ProjectManager manager = new ProjectManager(loader, props);
-		
 		return manager;
 	}
 
 	private ExecutorManager loadExecutorManager(Props props) throws Exception {
 		JdbcExecutorLoader loader = new JdbcExecutorLoader(props);
-		ExecutorManager execManager = new ExecutorManager(props, loader);
+		ExecutorManager execManager = new ExecutorManager(props, loader, alerters);
 		return execManager;
 	}
+	
+//	private ExecutorManagerAdapter loadExecutorManagerAdapter(Props props) throws Exception {
+//		String executorMode = props.getString("executor.manager.mode", "local");
+//		ExecutorManagerAdapter adapter;
+//		if(executorMode.equals("local")) {
+//			adapter = loadExecutorManager(props);
+//		} else {
+//			throw new Exception("Unknown ExecutorManager mode " + executorMode);
+//		}
+//		return adapter;
+//	}
 
-	private ScheduleManager loadScheduleManager(ExecutorManager execManager, SLAManager slaManager, Props props ) throws Exception {
-		ScheduleManager schedManager = new ScheduleManager(execManager, projectManager, slaManager, new JdbcScheduleLoader(props));
+	private ScheduleManager loadScheduleManager(TriggerManager tm, Props props ) throws Exception {
+		logger.info("Loading trigger based scheduler");
+		ScheduleLoader loader = new TriggerBasedScheduleLoader(tm, ScheduleManager.triggerSource);
+		return new ScheduleManager(loader);
+	}
+
+	private TriggerManager loadTriggerManager(Props props) throws TriggerManagerException {
+		TriggerLoader loader = new JdbcTriggerLoader(props);
+		return new TriggerManager(props, loader);
+	}
+	
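+	// Registers the built-in checker and action types by name so that triggers
+	// persisted as JSON can be rehydrated, and wires the static dependencies
+	// (executor, project and trigger managers, alerters) those types need.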
+	private void loadBuiltinCheckersAndActions() {
+		logger.info("Loading built-in checker and action types");
+		if(triggerManager instanceof TriggerManager) {
+			SlaChecker.setExecutorManager(executorManager);
+			ExecuteFlowAction.setExecutorManager(executorManager);
+			ExecuteFlowAction.setProjectManager(projectManager);
+			ExecuteFlowAction.setTriggerManager(triggerManager);
+			KillExecutionAction.setExecutorManager(executorManager);
+			SlaAlertAction.setExecutorManager(executorManager);
+			SlaAlertAction.setAlerters(alerters);
+			CreateTriggerAction.setTriggerManager(triggerManager);
+			ExecutionChecker.setExecutorManager(executorManager);
+		}
+		triggerManager.registerCheckerType(BasicTimeChecker.type, BasicTimeChecker.class);
+		triggerManager.registerCheckerType(SlaChecker.type, SlaChecker.class);
+		triggerManager.registerCheckerType(ExecutionChecker.type, ExecutionChecker.class);
+		triggerManager.registerActionType(ExecuteFlowAction.type, ExecuteFlowAction.class);
+		triggerManager.registerActionType(KillExecutionAction.type, KillExecutionAction.class);
+		triggerManager.registerActionType(SlaAlertAction.type, SlaAlertAction.class);
+		triggerManager.registerActionType(CreateTriggerAction.type, CreateTriggerAction.class);
+	}
+	
+	private Map<String, Alerter> loadAlerters(Props props) {
+		Map<String, Alerter> allAlerters = new HashMap<String, Alerter>();
+		// load built-in alerters
+		Emailer mailAlerter = new Emailer(props);
+		allAlerters.put("email", mailAlerter);
+		// load all plugin alerters
+		String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter");
+		allAlerters.putAll(loadPluginAlerters(pluginDir));
+		return allAlerters;
+	}
+	
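+	// Alerter plugins follow the standard plugin layout: each subdirectory of the
+	// plugin dir needs conf/plugin.properties (optionally override.properties) and
+	// a lib/ directory of jars; alerter.name and alerter.class identify the plugin,
+	// whose class must expose a constructor taking Props.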
+	private Map<String, Alerter> loadPluginAlerters(String pluginPath) {
+		File alerterPluginPath = new File(pluginPath);
+		if (!alerterPluginPath.exists()) {
+			return Collections.<String, Alerter>emptyMap();
+		}
+			
+		Map<String, Alerter> installedAlerterPlugins = new HashMap<String, Alerter>();
+		ClassLoader parentLoader = getClass().getClassLoader();
+		File[] pluginDirs = alerterPluginPath.listFiles();
+		ArrayList<String> jarPaths = new ArrayList<String>();
+		for (File pluginDir: pluginDirs) {
+			if (!pluginDir.isDirectory()) {
+				logger.error("The plugin path " + pluginDir + " is not a directory.");
+				continue;
+			}
+			
+			// Load the conf directory
+			File propertiesDir = new File(pluginDir, "conf");
+			Props pluginProps = null;
+			if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+				File propertiesFile = new File(propertiesDir, "plugin.properties");
+				File propertiesOverrideFile = new File(propertiesDir, "override.properties");
+				
+				if (propertiesFile.exists()) {
+					if (propertiesOverrideFile.exists()) {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
+					}
+					else {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile);
+					}
+				}
+				else {
+					logger.error("Plugin conf file " + propertiesFile + " not found.");
+					continue;
+				}
+			}
+			else {
+				logger.error("Plugin conf path " + propertiesDir + " not found.");
+				continue;
+			}
+			
+			String pluginName = pluginProps.getString("alerter.name");
+			List<String> extLibClasspath = pluginProps.getStringList("alerter.external.classpaths", (List<String>)null);
+			
+			String pluginClass = pluginProps.getString("alerter.class");
+			if (pluginClass == null) {
+				logger.error("Alerter class is not set.");
+				continue;
+			}
+			else {
+				logger.info("Plugin class " + pluginClass);
+			}
+			
+			URLClassLoader urlClassLoader = null;
+			File libDir = new File(pluginDir, "lib");
+			if (libDir.exists() && libDir.isDirectory()) {
+				File[] files = libDir.listFiles();
+				
+				ArrayList<URL> urls = new ArrayList<URL>();
+				for (int i=0; i < files.length; ++i) {
+					try {
+						URL url = files[i].toURI().toURL();
+						urls.add(url);
+					} catch (MalformedURLException e) {
+						logger.error(e);
+					}
+				}
+				if (extLibClasspath != null) {
+					for (String extLib : extLibClasspath) {
+						try {
+							File file = new File(pluginDir, extLib);
+							URL url = file.toURI().toURL();
+							urls.add(url);
+						} catch (MalformedURLException e) {
+							logger.error(e);
+						}
+					}
+				}
+				
+				urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+			}
+			else {
+				logger.error("Library path " + libDir + " not found.");
+				continue;
+			}
+			
+			Class<?> alerterClass = null;
+			try {
+				alerterClass = urlClassLoader.loadClass(pluginClass);
+			}
+			catch (ClassNotFoundException e) {
+				logger.error("Class " + pluginClass + " not found.");
+				continue;
+			}
 
-		return schedManager;
+			String source = FileIOUtils.getSourcePathFromClass(alerterClass);
+			logger.info("Source jar " + source);
+			jarPaths.add("jar:file:" + source);
+			
+			Constructor<?> constructor = null;
+			try {
+				constructor = alerterClass.getConstructor(Props.class);
+			} catch (NoSuchMethodException e) {
+				logger.error("Constructor not found in " + pluginClass);
+				continue;
+			}
+			
+			Object obj = null;
+			try {
+				obj = constructor.newInstance(pluginProps);
+			} catch (Exception e) {
+				logger.error(e);
+			} 
+			
+			if (!(obj instanceof Alerter)) {
+				logger.error("The object is not an Alerter");
+				continue;
+			}
+			
+			Alerter plugin = (Alerter) obj;
+			installedAlerterPlugins.put(pluginName, plugin);
+		}
+		
+		return installedAlerterPlugins;
+		
 	}
+	
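+	// Trigger plugins use the same conf/lib layout under trigger.plugin.dir; the
+	// class named by trigger.class is expected to expose static
+	// initiateCheckerTypes(...) and initiateActionTypes(...) methods, which are
+	// invoked reflectively with the plugin's Props.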
+	private void loadPluginCheckersAndActions(String pluginPath) {
+		logger.info("Loading plug-in checker and action types");
+		File triggerPluginPath = new File(pluginPath);
+		if (!triggerPluginPath.exists()) {
+			logger.error("plugin path " + pluginPath + " doesn't exist!");
+			return;
+		}
+			
+		ClassLoader parentLoader = this.getClassLoader();
+		File[] pluginDirs = triggerPluginPath.listFiles();
+		ArrayList<String> jarPaths = new ArrayList<String>();
+		for (File pluginDir: pluginDirs) {
+			if (!pluginDir.exists()) {
+				logger.error("Error! Trigger plugin path " + pluginDir.getPath() + " doesn't exist.");
+				continue;
+			}
+			
+			if (!pluginDir.isDirectory()) {
+				logger.error("The plugin path " + pluginDir + " is not a directory.");
+				continue;
+			}
+			
+			// Load the conf directory
+			File propertiesDir = new File(pluginDir, "conf");
+			Props pluginProps = null;
+			if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+				File propertiesFile = new File(propertiesDir, "plugin.properties");
+				File propertiesOverrideFile = new File(propertiesDir, "override.properties");
+				
+				if (propertiesFile.exists()) {
+					if (propertiesOverrideFile.exists()) {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
+					}
+					else {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile);
+					}
+				}
+				else {
+					logger.error("Plugin conf file " + propertiesFile + " not found.");
+					continue;
+				}
+			}
+			else {
+				logger.error("Plugin conf path " + propertiesDir + " not found.");
+				continue;
+			}
+			
+			List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>)null);
+			
+			String pluginClass = pluginProps.getString("trigger.class");
+			if (pluginClass == null) {
+				logger.error("Trigger class is not set.");
+				continue;
+			}
+			else {
+				logger.info("Plugin class " + pluginClass);
+			}
+			
+			URLClassLoader urlClassLoader = null;
+			File libDir = new File(pluginDir, "lib");
+			if (libDir.exists() && libDir.isDirectory()) {
+				File[] files = libDir.listFiles();
+				
+				ArrayList<URL> urls = new ArrayList<URL>();
+				for (int i=0; i < files.length; ++i) {
+					try {
+						URL url = files[i].toURI().toURL();
+						urls.add(url);
+					} catch (MalformedURLException e) {
+						logger.error(e);
+					}
+				}
+				if (extLibClasspath != null) {
+					for (String extLib : extLibClasspath) {
+						try {
+							File file = new File(pluginDir, extLib);
+							URL url = file.toURI().toURL();
+							urls.add(url);
+						} catch (MalformedURLException e) {
+							logger.error(e);
+						}
+					}
+				}
+				
+				urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+			}
+			else {
+				logger.error("Library path " + libDir + " not found.");
+				continue;
+			}
+			
+			Class<?> triggerClass = null;
+			try {
+				triggerClass = urlClassLoader.loadClass(pluginClass);
+			}
+			catch (ClassNotFoundException e) {
+				logger.error("Class " + pluginClass + " not found.");
+				continue;
+			}
 
-	private SLAManager loadSLAManager(Props props) throws SLAManagerException {
-		SLAManager slaManager = new SLAManager(executorManager, new JdbcSLALoader(props), props);
-		return slaManager;
+			String source = FileIOUtils.getSourcePathFromClass(triggerClass);
+			logger.info("Source jar " + source);
+			jarPaths.add("jar:file:" + source);
+			
+			try {
+				Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateCheckerTypes", pluginProps, app);
+			} catch (Exception e) {
+				logger.error("Unable to initiate checker types for " + pluginClass);
+				continue;
+			}
+			
+			try {
+				Utils.invokeStaticMethod(urlClassLoader, pluginClass, "initiateActionTypes", pluginProps, app);
+			} catch (Exception e) {
+				logger.error("Unable to initiate action types for " + pluginClass);
+				continue;
+			}
+			
+		}
 	}
 	
 	/**
@@ -269,18 +589,22 @@ public class AzkabanWebServer extends AzkabanServer {
 	/**
      * 
      */
-	public ExecutorManager getExecutorManager() {
+	public ExecutorManagerAdapter getExecutorManager() {
 		return executorManager;
 	}
 	
-	public SLAManager getSLAManager() {
-		return slaManager;
-	}
-	
 	public ScheduleManager getScheduleManager() {
 		return scheduleManager;
 	}
 	
+	public TriggerManager getTriggerManager() {
+		return triggerManager;
+	}
+	
+//	public TriggerBasedScheduler getScheduler() {
+//		return scheduler;
+//	}
+//	
 	/**
 	 * Creates and configures the velocity engine.
 	 * 
@@ -312,28 +636,28 @@ public class AzkabanWebServer extends AzkabanServer {
 		return engine;
 	}
 
-	private ClassLoader getBaseClassloader() throws MalformedURLException {
-		final ClassLoader retVal;
-
-		String hadoopHome = System.getenv("HADOOP_HOME");
-		String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
-
-		if (hadoopConfDir != null) {
-			logger.info("Using hadoop config found in " + hadoopConfDir);
-			retVal = new URLClassLoader(new URL[] { new File(hadoopConfDir)
-					.toURI().toURL() }, getClass().getClassLoader());
-		} else if (hadoopHome != null) {
-			logger.info("Using hadoop config found in " + hadoopHome);
-			retVal = new URLClassLoader(
-					new URL[] { new File(hadoopHome, "conf").toURI().toURL() },
-					getClass().getClassLoader());
-		} else {
-			logger.info("HADOOP_HOME not set, using default hadoop config.");
-			retVal = getClass().getClassLoader();
-		}
-
-		return retVal;
-	}
+//	private ClassLoader getBaseClassloader() throws MalformedURLException {
+//		final ClassLoader retVal;
+//
+//		String hadoopHome = System.getenv("HADOOP_HOME");
+//		String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+//
+//		if (hadoopConfDir != null) {
+//			logger.info("Using hadoop config found in " + hadoopConfDir);
+//			retVal = new URLClassLoader(new URL[] { new File(hadoopConfDir)
+//					.toURI().toURL() }, getClass().getClassLoader());
+//		} else if (hadoopHome != null) {
+//			logger.info("Using hadoop config found in " + hadoopHome);
+//			retVal = new URLClassLoader(
+//					new URL[] { new File(hadoopHome, "conf").toURI().toURL() },
+//					getClass().getClassLoader());
+//		} else {
+//			logger.info("HADOOP_HOME not set, using default hadoop config.");
+//			retVal = getClass().getClassLoader();
+//		}
+//
+//		return retVal;
+//	}
 
 	public ClassLoader getClassLoader() {
 		return baseClassLoader;
@@ -440,9 +764,25 @@ public class AzkabanWebServer extends AzkabanServer {
 		root.addServlet(new ServletHolder(new HistoryServlet()), "/history");
 		root.addServlet(new ServletHolder(new ScheduleServlet()),"/schedule");
 		root.addServlet(new ServletHolder(new JMXHttpServlet()),"/jmx");
+		root.addServlet(new ServletHolder(new TriggerManagerServlet()),"/triggers");
 		
 		String viewerPluginDir = azkabanSettings.getString("viewer.plugin.dir", "plugins/viewer");
 		app.setViewerPlugins(loadViewerPlugins(root, viewerPluginDir, app.getVelocityEngine()));
+		
+		// triggerplugin
+		String triggerPluginDir = azkabanSettings.getString("trigger.plugin.dir", "plugins/triggers");
+		Map<String, TriggerPlugin> triggerPlugins = loadTriggerPlugins(root, triggerPluginDir, app);
+		app.setTriggerPlugins(triggerPlugins);
+		// always have basic time trigger
+		//TODO: find something else to do the job
+//		app.getTriggerManager().addTriggerAgent(app.getScheduleManager().getTriggerSource(), app.getScheduleManager());
+		// add additional triggers
+//		for(TriggerPlugin plugin : triggerPlugins.values()) {
+//			TriggerAgent agent = plugin.getAgent();
+//			app.getTriggerManager().addTriggerAgent(agent.getTriggerSource(), agent);
+//		}
+		// fire up
+		app.getTriggerManager().start();
 
 		root.setAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, app);
 		try {
@@ -471,6 +811,150 @@ public class AzkabanWebServer extends AzkabanServer {
 		logger.info("Server running on " + (ssl ? "ssl" : "") + " port " + port + ".");
 	}
 
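+	/*
+	 * Scans each directory under the trigger plugin path: loads conf/plugin.properties
+	 * (plus an optional conf/override.properties), builds a URLClassLoader over the
+	 * plugin's lib/ jars and any trigger.external.classpaths entries, and instantiates
+	 * the configured trigger.class through its (String, Props, Context, AzkabanWebServer)
+	 * constructor. Successfully loaded plugins are returned keyed by trigger.name.
+	 */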
+	private static Map<String, TriggerPlugin> loadTriggerPlugins(Context root, String pluginPath, AzkabanWebServer azkabanWebApp) {
+		File triggerPluginPath = new File(pluginPath);
+		if (!triggerPluginPath.exists()) {
+			//return Collections.<String, TriggerPlugin>emptyMap();
+			return new HashMap<String, TriggerPlugin>();
+		}
+			
+		Map<String, TriggerPlugin> installedTriggerPlugins = new HashMap<String, TriggerPlugin>();
+		ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
+		File[] pluginDirs = triggerPluginPath.listFiles();
+		ArrayList<String> jarPaths = new ArrayList<String>();
+		for (File pluginDir: pluginDirs) {
+			if (!pluginDir.exists()) {
+				logger.error("Error! Trigger plugin path " + pluginDir.getPath() + " doesn't exist.");
+				continue;
+			}
+			
+			if (!pluginDir.isDirectory()) {
+				logger.error("The plugin path " + pluginDir + " is not a directory.");
+				continue;
+			}
+			
+			// Load the conf directory
+			File propertiesDir = new File(pluginDir, "conf");
+			Props pluginProps = null;
+			if (propertiesDir.exists() && propertiesDir.isDirectory()) {
+				File propertiesFile = new File(propertiesDir, "plugin.properties");
+				File propertiesOverrideFile = new File(propertiesDir, "override.properties");
+				
+				if (propertiesFile.exists()) {
+					if (propertiesOverrideFile.exists()) {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile, propertiesOverrideFile);
+					}
+					else {
+						pluginProps = PropsUtils.loadProps(null, propertiesFile);
+					}
+				}
+				else {
+					logger.error("Plugin conf file " + propertiesFile + " not found.");
+					continue;
+				}
+			}
+			else {
+				logger.error("Plugin conf path " + propertiesDir + " not found.");
+				continue;
+			}
+			
+			String pluginName = pluginProps.getString("trigger.name");
+//			String pluginWebPath = pluginProps.getString("trigger.web.path");
+//			int pluginOrder = pluginProps.getInt("trigger.order", 0);
+//			boolean pluginHidden = pluginProps.getBoolean("trigger.hidden", false);
+			List<String> extLibClasspath = pluginProps.getStringList("trigger.external.classpaths", (List<String>)null);
+			
+			String pluginClass = pluginProps.getString("trigger.class");
+			if (pluginClass == null) {
+				logger.error("Trigger class is not set.");
+				continue;
+			}
+			else {
+				logger.info("Loading plugin class " + pluginClass);
+			}
+			
+			URLClassLoader urlClassLoader = null;
+			File libDir = new File(pluginDir, "lib");
+			if (libDir.exists() && libDir.isDirectory()) {
+				File[] files = libDir.listFiles();
+				
+				ArrayList<URL> urls = new ArrayList<URL>();
+				for (int i=0; i < files.length; ++i) {
+					try {
+						URL url = files[i].toURI().toURL();
+						urls.add(url);
+					} catch (MalformedURLException e) {
+						logger.error(e);
+					}
+				}
+				if (extLibClasspath != null) {
+					for (String extLib : extLibClasspath) {
+						try {
+							File file = new File(pluginDir, extLib);
+							URL url = file.toURI().toURL();
+							urls.add(url);
+						} catch (MalformedURLException e) {
+							logger.error(e);
+						}
+					}
+				}
+				
+				urlClassLoader = new URLClassLoader(urls.toArray(new URL[urls.size()]), parentLoader);
+			}
+			else {
+				logger.error("Library path " + libDir + " not found.");
+				continue;
+			}
+			
+			Class<?> triggerClass = null;
+			try {
+				triggerClass = urlClassLoader.loadClass(pluginClass);
+			}
+			catch (ClassNotFoundException e) {
+				logger.error("Class " + pluginClass + " not found.");
+				continue;
+			}
+
+			String source = FileIOUtils.getSourcePathFromClass(triggerClass);
+			logger.info("Source jar " + source);
+			jarPaths.add("jar:file:" + source);
+			
+			Constructor<?> constructor = null;
+			try {
+				constructor = triggerClass.getConstructor(String.class, Props.class, Context.class, AzkabanWebServer.class);
+			} catch (NoSuchMethodException e) {
+				logger.error("Constructor not found in " + pluginClass);
+				continue;
+			}
+			
+			Object obj = null;
+			try {
+				obj = constructor.newInstance(pluginName, pluginProps, root, azkabanWebApp);
+			} catch (Exception e) {
+				logger.error("Could not instantiate trigger plugin " + pluginClass, e);
+			} 
+			
+			if (!(obj instanceof TriggerPlugin)) {
+				logger.error("The object is not a TriggerPlugin");
+				continue;
+			}
+			
+			TriggerPlugin plugin = (TriggerPlugin) obj;
+			installedTriggerPlugins.put(pluginName, plugin);
+		}
+		
+		// Velocity needs the jar resource paths to be set.
+		String jarResourcePath = StringUtils.join(jarPaths, ", ");
+		logger.info("Setting jar resource path " + jarResourcePath);
+		VelocityEngine ve = azkabanWebApp.getVelocityEngine();
+		ve.addProperty("jar.resource.loader.path", jarResourcePath);
+		
+		return installedTriggerPlugins;
+	}
+	
+	public Map<String, TriggerPlugin> getTriggerPlugins() {
+		return triggerPlugins;
+	}
+	
 	private static List<ViewerPlugin> loadViewerPlugins(Context root, String pluginPath, VelocityEngine ve) {
 		File viewerPluginPath = new File(pluginPath);
 		if (!viewerPluginPath.exists()) {
@@ -689,9 +1173,12 @@ public class AzkabanWebServer extends AzkabanServer {
 		mbeanServer = ManagementFactory.getPlatformMBeanServer();
 
 		registerMbean("jetty", new JmxJettyServer(server));
-		registerMbean("scheduler", new JmxScheduler(scheduleManager));
-		registerMbean("slaManager", new JmxSLAManager(slaManager));
-		registerMbean("executorManager", new JmxExecutorManager(executorManager));
+		registerMbean("triggerManager", new JmxTriggerManager(triggerManager));
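+		// Only register the executor-manager MBean when the adapter is the in-process
+		// ExecutorManager implementation, since JmxExecutorManager requires that concrete type.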
+		if(executorManager instanceof ExecutorManager) {
+			registerMbean("executorManager", new JmxExecutorManager((ExecutorManager) executorManager));
+		}
+//		registerMbean("executorManager", new JmxExecutorManager(executorManager));
 	}
 	
 	public void close() {
@@ -704,7 +1191,9 @@ public class AzkabanWebServer extends AzkabanServer {
 			logger.error("Failed to cleanup MBeanServer", e);
 		}
 		scheduleManager.shutdown();
-		slaManager.shutdown();
+//		if(executorManager instanceof ExecutorManagerLocalAdapter) {
+//			((ExecutorManagerLocalAdapter)executorManager).getExecutorManager().shutdown();
+//		}
 		executorManager.shutdown();
 	}
 	
diff --git a/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java b/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
index 51e925d..f7a5184 100644
--- a/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
+++ b/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
@@ -63,7 +63,9 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 	private String color;
 
 	private List<ViewerPlugin> viewerPlugins;
-
+	private List<TriggerPlugin> triggerPlugins;
+	
+	
 	/**
 	 * To retrieve the application for the servlet
 	 * 
@@ -90,6 +92,7 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 		if (application instanceof AzkabanWebServer) {
 			AzkabanWebServer server = (AzkabanWebServer)application;
 			viewerPlugins = server.getViewerPlugins();
+			triggerPlugins = new ArrayList<TriggerPlugin>(server.getTriggerPlugins().values());
 		}
 	}
 	
@@ -302,6 +305,10 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 //			page.add("viewerPath", plugin.getPluginPath());
 		}
 		
+		if(triggerPlugins != null && !triggerPlugins.isEmpty()) {
+			page.add("triggerPlugins", triggerPlugins);
+		}
+		
 		return page;
 	}
 
@@ -330,6 +337,10 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 			page.add("viewerPath", plugin.getPluginPath());
 		}
 		
+		if(triggerPlugins != null && !triggerPlugins.isEmpty()) {
+			page.add("triggers", triggerPlugins);
+		}
+		
 		return page;
 	}
 	
diff --git a/src/java/azkaban/webapp/servlet/AbstractServiceServlet.java b/src/java/azkaban/webapp/servlet/AbstractServiceServlet.java
new file mode 100644
index 0000000..eb1013e
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/AbstractServiceServlet.java
@@ -0,0 +1,91 @@
+package azkaban.webapp.servlet;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.codehaus.jackson.map.ObjectMapper;
+import azkaban.webapp.AzkabanServer;
+
+public class AbstractServiceServlet extends HttpServlet{
+	
+	private static final long serialVersionUID = 1L;
+	public static final String JSON_MIME_TYPE = "application/json";
+	
+	private AzkabanServer application;
+
+	@Override
+	public void init(ServletConfig config) throws ServletException {
+		application = (AzkabanServer) config.getServletContext().getAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY);
+
+		if (application == null) {
+			throw new IllegalStateException(
+					"No Azkaban application is defined in the servlet context!");
+		}
+	}
+
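+	// Serializes the given object as JSON directly onto the servlet response stream.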
+	protected void writeJSON(HttpServletResponse resp, Object obj) throws IOException {
+		resp.setContentType(JSON_MIME_TYPE);
+		ObjectMapper mapper = new ObjectMapper();
+		OutputStream stream = resp.getOutputStream();
+		mapper.writeValue(stream, obj);
+	}
+
+	public boolean hasParam(HttpServletRequest request, String param) {
+		return request.getParameter(param) != null;
+	}
+
+	public String getParam(HttpServletRequest request, String name)
+			throws ServletException {
+		String p = request.getParameter(name);
+		if (p == null) {
+			throw new ServletException("Missing required parameter '" + name + "'.");
+		}
+		return p;
+	}
+	
+	public String getParam(HttpServletRequest request, String name, String defaultVal ) {
+		String p = request.getParameter(name);
+		if (p == null) {
+			return defaultVal;
+		}
+
+		return p;
+	}
+
+	public int getIntParam(HttpServletRequest request, String name) throws ServletException {
+		String p = getParam(request, name);
+		return Integer.parseInt(p);
+	}
+	
+	public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+		if (hasParam(request, name)) {
+			try {
+				return getIntParam(request, name);
+			} catch (Exception e) {
+				return defaultVal;
+			}
+		}
+		return defaultVal;
+	}
+	
+	public long getLongParam(HttpServletRequest request, String name) throws ServletException {
+		String p = getParam(request, name);
+		return Long.parseLong(p);
+	}
+	
+	public long getLongParam(HttpServletRequest request, String name, long defaultVal) {
+		if (hasParam(request, name)) {
+			try {
+				return getLongParam(request, name);
+			} catch (Exception e) {
+				return defaultVal;
+			}
+		}
+		return defaultVal;
+	}
+	
+}
diff --git a/src/java/azkaban/webapp/servlet/ExecutorServlet.java b/src/java/azkaban/webapp/servlet/ExecutorServlet.java
index 4b64bc4..919045e 100644
--- a/src/java/azkaban/webapp/servlet/ExecutorServlet.java
+++ b/src/java/azkaban/webapp/servlet/ExecutorServlet.java
@@ -30,8 +30,8 @@ import javax.servlet.http.HttpServletResponse;
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableNode;
 import azkaban.executor.ExecutionOptions;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutionOptions.FailureAction;
-import azkaban.executor.ExecutorManager;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.executor.Status;
 import azkaban.flow.Flow;
@@ -39,6 +39,7 @@ import azkaban.project.Project;
 import azkaban.project.ProjectManager;
 import azkaban.scheduler.Schedule;
 import azkaban.scheduler.ScheduleManager;
+import azkaban.scheduler.ScheduleManagerException;
 import azkaban.user.Permission;
 import azkaban.user.User;
 import azkaban.user.Permission.Type;
@@ -50,7 +51,7 @@ import azkaban.webapp.session.Session;
 public class ExecutorServlet extends LoginAbstractAzkabanServlet {
 	private static final long serialVersionUID = 1L;
 	private ProjectManager projectManager;
-	private ExecutorManager executorManager;
+	private ExecutorManagerAdapter executorManager;
 	private ScheduleManager scheduleManager;
 	private ExecutorVelocityHelper velocityHelper;
 
@@ -504,11 +505,16 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
 		ret.put("failureEmails", flow.getFailureEmails());
 		
 		Schedule sflow = null;
-		for (Schedule sched: scheduleManager.getSchedules()) {
-			if (sched.getProjectId() == project.getId() && sched.getFlowName().equals(flowId)) {
-				sflow = sched;
-				break;
+		try {
+			for (Schedule sched: scheduleManager.getSchedules()) {
+				if (sched.getProjectId() == project.getId() && sched.getFlowName().equals(flowId)) {
+					sflow = sched;
+					break;
+				}
 			}
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
 		}
 		
 		if (sflow != null) {
@@ -763,7 +769,7 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
 		options.setMailCreator(flow.getMailCreator());
 		
 		try {
-			String message = executorManager.submitExecutableFlow(exflow);
+			String message = executorManager.submitExecutableFlow(exflow, user.getUserId());
 			ret.put("message", message);
 		}
 		catch (ExecutorManagerException e) {
diff --git a/src/java/azkaban/webapp/servlet/HistoryServlet.java b/src/java/azkaban/webapp/servlet/HistoryServlet.java
index 9cf4229..95874fa 100644
--- a/src/java/azkaban/webapp/servlet/HistoryServlet.java
+++ b/src/java/azkaban/webapp/servlet/HistoryServlet.java
@@ -32,6 +32,7 @@ import org.joda.time.format.DateTimeFormat;
 
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.project.Project;
 import azkaban.project.ProjectManager;
@@ -41,7 +42,7 @@ import azkaban.webapp.session.Session;
 public class HistoryServlet extends LoginAbstractAzkabanServlet {
 
 	private static final long serialVersionUID = 1L;
-	private ExecutorManager executorManager;
+	private ExecutorManagerAdapter executorManager;
 	private ProjectManager projectManager;
 	private ExecutorVMHelper vmHelper;
 	
diff --git a/src/java/azkaban/webapp/servlet/JMXHttpServlet.java b/src/java/azkaban/webapp/servlet/JMXHttpServlet.java
index af2d7e8..1a5d1d7 100644
--- a/src/java/azkaban/webapp/servlet/JMXHttpServlet.java
+++ b/src/java/azkaban/webapp/servlet/JMXHttpServlet.java
@@ -33,6 +33,8 @@ import org.apache.log4j.Logger;
 
 import azkaban.executor.ConnectorParams;
 import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
+import azkaban.trigger.TriggerManager;
 import azkaban.user.Permission;
 import azkaban.user.Role;
 import azkaban.user.User;
@@ -53,7 +55,8 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 
 	private UserManager userManager;
 	private AzkabanWebServer server;
-	private ExecutorManager executorManager;
+	private ExecutorManagerAdapter executorManager;
+	private TriggerManager triggerManager;
 	
 	@Override
 	public void init(ServletConfig config) throws ServletException {
@@ -62,6 +65,8 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 		server = (AzkabanWebServer)getApplication();
 		userManager = server.getUserManager();
 		executorManager = server.getExecutorManager();
+
+		triggerManager = server.getTriggerManager();
 	}
 	
 	@Override
@@ -69,7 +74,7 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 		if (hasParam(req, "ajax")){
 			Map<String,Object> ret = new HashMap<String,Object>();
 
-			if(!hasAdminRole(session.getUser())) {
+			if(!hasPermission(session.getUser(), Permission.Type.METRICS)) {
 				ret.put("error", "User " + session.getUser().getUserId() + " has no permission.");
 				this.writeJSON(resp, ret, true);
 				return;
@@ -87,6 +92,17 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 				Map<String, Object> result = executorManager.callExecutorJMX(hostPort, JMX_GET_ALL_MBEAN_ATTRIBUTES, mbean);
 				ret = result;
 			}
+//			else 
+//				if (TriggerConnectorParams.JMX_GET_ALL_TRIGGER_SERVER_ATTRIBUTES.equals(ajax)) {
+//				if(!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
+//					ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"+ JMX_HOSTPORT +"' must be set");
+//					this.writeJSON(resp, ret, true);
+//					return;
+//				}
+////				String hostPort = getParam(req, JMX_HOSTPORT);
+////				String mbean = getParam(req, JMX_MBEAN);
+//				ret = triggerManager.getJMX().getAllJMXMbeans();
+//			}
 			else if (JMX_GET_MBEANS.equals(ajax)) {
 				ret.put("mbeans", server.getMbeanNames());
 			}
@@ -167,7 +183,7 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 	private void handleJMXPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws IOException {
 		Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jmxpage.vm");
 		
-		if(!hasAdminRole(session.getUser())) {
+		if(!hasPermission(session.getUser(), Permission.Type.METRICS)) {
 			page.add("errorMsg", "User " + session.getUser().getUserId() + " has no permission.");
 			page.render();
 			return;
@@ -176,24 +192,47 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 		page.add("mbeans", server.getMbeanNames());
 		
 		Map<String, Object> executorMBeans = new HashMap<String,Object>();
-		Set<String> primaryServerHosts = executorManager.getPrimaryServerHosts();
+//		Set<String> primaryServerHosts = executorManager.getPrimaryServerHosts();
 		for (String hostPort: executorManager.getAllActiveExecutorServerHosts()) {
 			try {
 				Map<String, Object> mbeans = executorManager.callExecutorJMX(hostPort, JMX_GET_MBEANS, null);
 	
-				if (primaryServerHosts.contains(hostPort)) {
-					executorMBeans.put(hostPort, mbeans.get("mbeans"));
-				}
-				else {
-					executorMBeans.put(hostPort, mbeans.get("mbeans"));
-				}
+				executorMBeans.put(hostPort, mbeans.get("mbeans"));
+//				if (primaryServerHosts.contains(hostPort)) {
+//					executorMBeans.put(hostPort, mbeans.get("mbeans"));
+//				}
+//				else {
+//					executorMBeans.put(hostPort, mbeans.get("mbeans"));
+//				}
 			}
 			catch (IOException e) {
 				logger.error("Cannot contact executor " + hostPort, e);
 			}
 		}
 		
-		page.add("remoteMBeans", executorMBeans);
+		page.add("executorRemoteMBeans", executorMBeans);
+		
+		Map<String, Object> triggerserverMBeans = new HashMap<String,Object>();
+//		Set<String> primaryTriggerServerHosts = triggerManager.getPrimaryServerHosts();
+//		for (String hostPort: triggerManager.getAllActiveTriggerServerHosts()) {
+//			try {
+//				Map<String, Object> mbeans = triggerManager.callTriggerServerJMX(hostPort, TriggerConnectorParams.JMX_GET_MBEANS, null);
+//				
+//				if (primaryTriggerServerHosts.contains(hostPort)) {
+//					triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
+//				}
+//				else {
+//					triggerserverMBeans.put(hostPort, mbeans.get("mbeans"));
+//				}
+//			}
+//			catch (IOException e) {
+//				logger.error("Cannot contact executor " + hostPort, e);
+//			}
+//		}
+		triggerserverMBeans.put(triggerManager.getJMX().getPrimaryServerHost(), triggerManager.getJMX().getAllJMXMbeans());
+		
+		page.add("triggerserverRemoteMBeans", triggerserverMBeans);
+		
 		page.render();
 	}
 	
@@ -202,11 +241,22 @@ public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements Conne
 
 	}
 	
-	private boolean hasAdminRole(User user) {
+//	private boolean hasAdminRole(User user) {
+//		for(String roleName: user.getRoles()) {
+//			Role role = userManager.getRole(roleName);
+//			Permission perm = role.getPermission();
+//			if (perm.isPermissionSet(Permission.Type.ADMIN)) {
+//				return true;
+//			}
+//		}
+//		
+//		return false;
+//	}
+	
+	protected boolean hasPermission(User user, Permission.Type type) {	
 		for(String roleName: user.getRoles()) {
 			Role role = userManager.getRole(roleName);
-			Permission perm = role.getPermission();
-			if (perm.isPermissionSet(Permission.Type.ADMIN)) {
+			if (role.getPermission().isPermissionSet(type) || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
 				return true;
 			}
 		}
diff --git a/src/java/azkaban/webapp/servlet/ProjectManagerServlet.java b/src/java/azkaban/webapp/servlet/ProjectManagerServlet.java
index b3979d6..868c85d 100644
--- a/src/java/azkaban/webapp/servlet/ProjectManagerServlet.java
+++ b/src/java/azkaban/webapp/servlet/ProjectManagerServlet.java
@@ -45,6 +45,7 @@ import org.apache.log4j.Logger;
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableJobInfo;
 import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.flow.Edge;
 import azkaban.flow.Flow;
@@ -56,6 +57,7 @@ import azkaban.project.ProjectManager;
 import azkaban.project.ProjectManagerException;
 import azkaban.scheduler.Schedule;
 import azkaban.scheduler.ScheduleManager;
+import azkaban.scheduler.ScheduleManagerException;
 import azkaban.user.Permission;
 import azkaban.user.Role;
 import azkaban.user.UserManager;
@@ -75,7 +77,7 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
 	private static final String LOCKDOWN_CREATE_PROJECTS_KEY = "lockdown.create.projects";
 	
 	private ProjectManager projectManager;
-	private ExecutorManager executorManager;
+	private ExecutorManagerAdapter executorManager;
 	private ScheduleManager scheduleManager;
 	private UserManager userManager;
 
@@ -360,13 +362,19 @@ public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
 		
 		// Check if scheduled
 		Schedule sflow = null;
-		for (Schedule flow: scheduleManager.getSchedules()) {
+		try {
+			for (Schedule flow: scheduleManager.getSchedules()) {
 
-			if (flow.getProjectId() == project.getId()) {
-				sflow = flow;
-				break;
+				if (flow.getProjectId() == project.getId()) {
+					sflow = flow;
+					break;
+				}
 			}
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
 		}
+		
 		if (sflow != null) {
 			this.setErrorMessageInCookie(resp, "Cannot delete. Please unschedule " + sflow.getScheduleName() + ".");
 
diff --git a/src/java/azkaban/webapp/servlet/ScheduleServlet.java b/src/java/azkaban/webapp/servlet/ScheduleServlet.java
index 7e881dc..566437d 100644
--- a/src/java/azkaban/webapp/servlet/ScheduleServlet.java
+++ b/src/java/azkaban/webapp/servlet/ScheduleServlet.java
@@ -46,7 +46,7 @@ import org.joda.time.format.DateTimeFormat;
 
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
-import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.flow.Flow;
 import azkaban.flow.Node;
@@ -57,16 +57,13 @@ import azkaban.scheduler.Schedule;
 import azkaban.scheduler.ScheduleManager;
 import azkaban.scheduler.ScheduleManagerException;
 import azkaban.scheduler.ScheduleStatisticManager;
-import azkaban.sla.SLA;
-import azkaban.sla.SLA.SlaAction;
-import azkaban.sla.SLA.SlaRule;
-import azkaban.sla.SLA.SlaSetting;
-import azkaban.sla.SlaOptions;
+import azkaban.sla.SlaOption;
 import azkaban.user.Permission;
 import azkaban.user.Permission.Type;
 import azkaban.user.User;
 import azkaban.utils.JSONUtils;
 import azkaban.utils.SplitterOutputStream;
+import azkaban.utils.Utils;
 import azkaban.webapp.AzkabanWebServer;
 import azkaban.webapp.session.Session;
 
@@ -107,8 +104,8 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		}
 		else if(ajaxName.equals("setSla")) {
 			ajaxSetSla(req, ret, session.getUser());
-		}
-		else if(ajaxName.equals("loadFlow")) {
+		} else
+		if(ajaxName.equals("loadFlow")) {
 			ajaxLoadFlows(req, ret, session.getUser());
 		}
 		else if(ajaxName.equals("loadHistory")) {
@@ -136,39 +133,30 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 				ret.put("error", "User " + user + " does not have permission to set SLA for this flow.");
 				return;
 			}
-			
-			
-			SlaOptions slaOptions= new SlaOptions();
-			
-			String slaEmails = getParam(req, "slaEmails");
-			String[] emailSplit = slaEmails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+				
+			String emailStr = getParam(req, "slaEmails");
+			String[] emailSplit = emailStr.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+			List<String> slaEmails = Arrays.asList(emailSplit);
 			
 			Map<String, String> settings = getParamGroup(req, "settings");
-			List<SlaSetting> slaSettings = new ArrayList<SlaSetting>();
+			
+			List<SlaOption> slaOptions = new ArrayList<SlaOption>();
 			for(String set : settings.keySet()) {
-				SlaSetting s;
+				SlaOption sla;
 				try {
-				s = parseSlaSetting(settings.get(set));
+				sla = parseSlaSetting(settings.get(set));
+				sla.getInfo().put(SlaOption.INFO_FLOW_NAME, sched.getFlowName());
+				sla.getInfo().put(SlaOption.INFO_EMAIL_LIST, slaEmails);
 				}
 				catch (Exception e) {
 					throw new ServletException(e);
 				}
-				if(s != null) {
-					slaSettings.add(s);
+				if(sla != null) {
+					sla.getInfo().put("SlaEmails", slaEmails);
+					slaOptions.add(sla);
 				}
 			}
 			
-			if(slaSettings.size() > 0) {
-				if(slaEmails.equals("")) {
-					ret.put("error", "Please put correct email settings for your SLA actions");
-					return;
-				}
-				slaOptions.setSlaEmails(Arrays.asList(emailSplit));
-				slaOptions.setSettings(slaSettings);
-			}
-			else {
-				slaOptions = null;
-			}
 			sched.setSlaOptions(slaOptions);
 			scheduleManager.insertSchedule(sched);
 
@@ -178,22 +166,50 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 			
 		} catch (ServletException e) {
 			ret.put("error", e.getMessage());
+		} catch (ScheduleManagerException e) {
+			ret.put("error", e.getMessage());
 		}
 		
 	}
 	
-	private SlaSetting parseSlaSetting(String set) throws ScheduleManagerException {
+	private SlaOption parseSlaSetting(String set) throws ScheduleManagerException {
 		// "" + Duration + EmailAction + KillAction
+		logger.info("Trying to set SLA with the following setting: " + set);
+		
+		String slaType;
+		List<String> slaActions = new ArrayList<String>();
+		Map<String, Object> slaInfo = new HashMap<String, Object>();
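+		// Each setting arrives as a comma-separated tuple: id,rule,duration,emailAction,killAction.
+		// An empty id means the SLA applies to the whole flow; otherwise id names a job.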
 		String[] parts = set.split(",", -1);
 		String id = parts[0];
-		String rule = parts[1];
+		String rule = parts[1];	
 		String duration = parts[2];
 		String emailAction = parts[3];
 		String killAction = parts[4];
 		if(emailAction.equals("true") || killAction.equals("true")) {
-			SlaSetting r = new SlaSetting();			
-			r.setId(id);
-			r.setRule(SlaRule.valueOf(rule));
+			//String type = id.equals("") ? SlaOption.RULE_FLOW_RUNTIME_SLA : SlaOption.RULE_JOB_RUNTIME_SLA ;
+			if(emailAction.equals("true")) {
+				slaActions.add(SlaOption.ACTION_ALERT);
+				slaInfo.put(SlaOption.ALERT_TYPE, "email");
+			}
+			if(killAction.equals("true")) {
+				slaActions.add(SlaOption.ACTION_CANCEL_FLOW);
+			}
+			if(id.equals("")) {
+				if(rule.equals("SUCCESS")) {
+					slaType = SlaOption.TYPE_FLOW_SUCCEED;
+				}
+				else {
+					slaType = SlaOption.TYPE_FLOW_FINISH;
+				}
+			} else {
+				slaInfo.put(SlaOption.INFO_JOB_NAME, id);
+				if(rule.equals("SUCCESS")) {
+					slaType = SlaOption.TYPE_JOB_SUCCEED;
+				} else {
+					slaType = SlaOption.TYPE_JOB_FINISH;
+				}
+			}
+			
 			ReadablePeriod dur;
 			try {
 				dur = parseDuration(duration);
@@ -201,15 +217,10 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 			catch (Exception e) {
 				throw new ScheduleManagerException("Unable to parse duration for a SLA that needs to take actions!", e);
 			}
-			r.setDuration(dur);
-			List<SlaAction> actions = new ArrayList<SLA.SlaAction>();
-			if(emailAction.equals("true")) {
-				actions.add(SlaAction.EMAIL);
-			}
-			if(killAction.equals("true")) {
-				actions.add(SlaAction.KILL);
-			}
-			r.setActions(actions);
+
+			slaInfo.put(SlaOption.INFO_DURATION, Utils.createPeriodString(dur));
+			SlaOption r = new SlaOption(slaType, slaActions, slaInfo);
+			logger.info("Parsing sla as id:" + id + " type:" + slaType + " rule:" + rule + " Duration:" + duration + " actions:" + slaActions);
 			return r;
 		}
 		return null;
@@ -240,15 +251,15 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 				return;
 			}
 			
-			SlaOptions slaOptions = sched.getSlaOptions();
+			List<SlaOption> slaOptions = sched.getSlaOptions();
 			ExecutionOptions flowOptions = sched.getExecutionOptions();
 			
-			if(slaOptions != null) {
-				ret.put("slaEmails", slaOptions.getSlaEmails());
-				List<SlaSetting> settings = slaOptions.getSettings();
+			if(slaOptions != null && slaOptions.size() > 0) {
+				ret.put("slaEmails", slaOptions.get(0).getInfo().get(SlaOption.INFO_EMAIL_LIST));
+				
 				List<Object> setObj = new ArrayList<Object>();
-				for(SlaSetting set: settings) {
-					setObj.add(set.toObject());
+				for(SlaOption sla: slaOptions) {
+					setObj.add(sla.toWebObject());
 				}
 				ret.put("settings", setObj);
 			}
@@ -286,6 +297,8 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 			ret.put("allJobNames", allJobs);
 		} catch (ServletException e) {
 			ret.put("error", e);
+		} catch (ScheduleManagerException e) {
+			ret.put("error", e);
 		}
 		
 	}
@@ -311,7 +324,13 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		
 		Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/scheduledflowpage.vm");
 		
-		List<Schedule> schedules = scheduleManager.getSchedules();
+		List<Schedule> schedules;
+		try {
+			schedules = scheduleManager.getSchedules();
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
+		}
 		page.add("schedules", schedules);
 //		
 //		List<SLA> slas = slaManager.getSLAs();
@@ -325,7 +344,13 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		
 		Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/scheduledflowcalendarpage.vm");
 		
-		List<Schedule> schedules = scheduleManager.getSchedules();
+		List<Schedule> schedules;
+		try {
+			schedules = scheduleManager.getSchedules();
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
+		}
 		page.add("schedules", schedules);
 //		
 //		List<SLA> slas = slaManager.getSLAs();
@@ -362,7 +387,13 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 
 	private void ajaxLoadFlows(HttpServletRequest req, HashMap<String, Object> ret, User user) throws ServletException {
 		
-		List<Schedule> schedules = scheduleManager.getSchedules();
+		List<Schedule> schedules;
+		try {
+			schedules = scheduleManager.getSchedules();
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
+		}
 		// See if anything is scheduled
 		if (schedules.size() <= 0)
 			return;
@@ -371,11 +402,16 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		ret.put("items", output);
 
 		for (Schedule schedule : schedules) {
-			writeScheduleData(output, schedule);
+			try {
+				writeScheduleData(output, schedule);
+			} catch (ScheduleManagerException e) {
+				throw new ServletException(e);
+			}
 		}
 	}
 
-	private void writeScheduleData(List<HashMap<String, Object>> output, Schedule schedule) {
+	private void writeScheduleData(List<HashMap<String, Object>> output, Schedule schedule) throws ScheduleManagerException {
 		Map<String, Object> stats = ScheduleStatisticManager.getStatistics(schedule.getScheduleId(), (AzkabanWebServer) getApplication());
 		HashMap<String, Object> data = new HashMap<String, Object>();
 		data.put("scheduleid", schedule.getScheduleId());
@@ -442,7 +478,7 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		List<ExecutableFlow> history = null;
 		try {
 			AzkabanWebServer server = (AzkabanWebServer) getApplication();
-			ExecutorManager executorManager = server.getExecutorManager();
+			ExecutorManagerAdapter executorManager = server.getExecutorManager();
 			history = executorManager.getExecutableFlows(null, null, null, 0, startTime, endTime, -1, -1);
 		} catch (ExecutorManagerException e) {
 			logger.error(e);
@@ -503,7 +539,13 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 
 	private void ajaxRemoveSched(HttpServletRequest req, Map<String, Object> ret, User user) throws ServletException{
 		int scheduleId = getIntParam(req, "scheduleId");
-		Schedule sched = scheduleManager.getSchedule(scheduleId);
+		Schedule sched;
+		try {
+			sched = scheduleManager.getSchedule(scheduleId);
+		} catch (ScheduleManagerException e) {
+			throw new ServletException(e);
+		}
 		if(sched == null) {
 			ret.put("message", "Schedule with ID " + scheduleId + " does not exist");
 			ret.put("status", "error");
@@ -588,7 +630,8 @@ public class ScheduleServlet extends LoginAbstractAzkabanServlet {
 		catch (Exception e) {
 			ret.put("error", e.getMessage());
 		}
-		SlaOptions slaOptions = null;
+		
+		List<SlaOption> slaOptions = null;
 		
 		Schedule schedule = scheduleManager.scheduleFlow(-1, projectId, projectName, flowName, "ready", firstSchedTime.getMillis(), firstSchedTime.getZone(), thePeriod, DateTime.now().getMillis(), firstSchedTime.getMillis(), firstSchedTime.getMillis(), user.getUserId(), flowOptions, slaOptions);
 		logger.info("User '" + user.getUserId() + "' has scheduled " + "[" + projectName + flowName +  " (" + projectId +")" + "].");
diff --git a/src/java/azkaban/webapp/servlet/TriggerManagerServlet.java b/src/java/azkaban/webapp/servlet/TriggerManagerServlet.java
new file mode 100644
index 0000000..44f0f1a
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/TriggerManagerServlet.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2012 LinkedIn, Inc
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package azkaban.webapp.servlet;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.log4j.Logger;
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerManager;
+import azkaban.trigger.TriggerManagerException;
+import azkaban.user.User;
+import azkaban.webapp.AzkabanWebServer;
+import azkaban.webapp.session.Session;
+
+public class TriggerManagerServlet extends LoginAbstractAzkabanServlet {
+	private static final long serialVersionUID = 1L;
+	private static final Logger logger = Logger.getLogger(TriggerManagerServlet.class);
+	private TriggerManager triggerManager;
+
+	@Override
+	public void init(ServletConfig config) throws ServletException {
+		super.init(config);
+		AzkabanWebServer server = (AzkabanWebServer)getApplication();
+		triggerManager = server.getTriggerManager();
+	}
+	
+	@Override
+	protected void handleGet(HttpServletRequest req, HttpServletResponse resp,
+			Session session) throws ServletException, IOException {
+		if (hasParam(req, "ajax")) {
+			handleAJAXAction(req, resp, session);
+		} else {
+			handleGetAllSchedules(req, resp, session);
+		}
+	}
+	
+	private void handleAJAXAction(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
+		HashMap<String, Object> ret = new HashMap<String, Object>();
+		String ajaxName = getParam(req, "ajax");
+		
+		try {
+			if (ajaxName.equals("expireTrigger")) {
+				ajaxExpireTrigger(req, ret, session.getUser());
+			}
+		} catch (Exception e) {
+			ret.put("error", e.getMessage());
+		}
+		
+		if (ret != null) {
+			this.writeJSON(resp, ret);
+		}
+	}
+
+	private void handleGetAllSchedules(HttpServletRequest req, HttpServletResponse resp,
+			Session session) throws ServletException, IOException{
+		
+		Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/triggerspage.vm");
+		
+		List<Trigger> triggers = triggerManager.getTriggers();
+		page.add("triggers", triggers);
+//		
+//		List<SLA> slas = slaManager.getSLAs();
+//		page.add("slas", slas);
+
+		page.render();
+	}
+	
+	@Override
+	protected void handlePost(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
+		if (hasParam(req, "ajax")) {
+			handleAJAXAction(req, resp, session);
+		}
+	}
+
+	private void ajaxExpireTrigger(HttpServletRequest req, Map<String, Object> ret, User user) throws ServletException, TriggerManagerException{
+		int triggerId = getIntParam(req, "triggerId");
+		Trigger t = triggerManager.getTrigger(triggerId);
+		if(t == null) {
+			ret.put("message", "Trigger with ID " + triggerId + " does not exist");
+			ret.put("status", "error");
+			return;
+		}
+		
+//		if(!hasPermission(project, user, Type.SCHEDULE)) {
+//			ret.put("status", "error");
+//			ret.put("message", "Permission denied. Cannot remove trigger with id " + triggerId);
+//			return;
+//		}
+
+		triggerManager.expireTrigger(triggerId);
+		logger.info("User '" + user.getUserId() + "' has expired trigger " + t.getDescription());
+//		projectManager.postProjectEvent(project, EventType.SCHEDULE, user.getUserId(), "Schedule " + sched.toString() + " has been removed.");
+		
+		ret.put("status", "success");
+		ret.put("message", "Trigger " + triggerId + " has been expired.");
+		return;
+	}
+
+}
+
diff --git a/src/java/azkaban/webapp/servlet/TriggerPlugin.java b/src/java/azkaban/webapp/servlet/TriggerPlugin.java
new file mode 100644
index 0000000..b2d9db9
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/TriggerPlugin.java
@@ -0,0 +1,33 @@
+package azkaban.webapp.servlet;
+
+import azkaban.trigger.TriggerAgent;
+
+public interface TriggerPlugin {
+	
+//	public TriggerPlugin(String pluginName, Props props, AzkabanWebServer azkabanWebApp) {
+//		this.pluginName = pluginName;
+//		this.pluginPath = props.getString("trigger.path");
+//		this.order = props.getInt("trigger.order", 0);
+//		this.hidden = props.getBoolean("trigger.hidden", false);
+//
+//	}
+
+	public AbstractAzkabanServlet getServlet();
+	public TriggerAgent getAgent();
+	public void load();
+	
+	public String getPluginName();
+
+	public String getPluginPath();
+
+	public int getOrder();
+	
+	public boolean isHidden();
+
+	public void setHidden(boolean hidden);
+	
+	public String getInputPanelVM();
+	
+	
+	
+}
diff --git a/src/java/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm b/src/java/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
index d2aba0a..f903ac5 100644
--- a/src/java/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/flowexecutionpanel.vm
@@ -151,7 +151,14 @@
 		#if(!$show_schedule || $show_schedule == 'true') 
 		<a class="btn2" id="schedule-btn">Schedule</a>
 		#end
-
+		
+#*
+		#if( $triggerPlugins.size() > 0 ) 
+			#foreach( $triggerPlugin in $triggerPlugins )
+			<a class="btn2" id=set-$triggerPlugin.pluginName>$triggerPlugin.pluginName</a>
+			#end
+		#end
+*#
 		<a class="yes btn1" id="execute-btn">Execute</a>
 		<a class="no simplemodal-close btn3 closeExecPanel">Cancel</a>
 	</div>
@@ -162,6 +169,16 @@
 #parse( "azkaban/webapp/servlet/velocity/schedulepanel.vm" )
 #end
 
+#*
+#if( $triggerPlugins.size() > 0 ) 
+	#foreach( $triggerPlugin in $triggerPlugins )
+		#set ($prefix = $triggerPlugin.pluginName )
+		#set ($webpath = $triggerPlugin.pluginPath )
+		#parse( $triggerPlugin.inputPanelVM )
+	#end
+#end
+*#
+
 <div id="contextMenu">
 	
 </div>
diff --git a/src/java/azkaban/webapp/servlet/velocity/jmxpage.vm b/src/java/azkaban/webapp/servlet/velocity/jmxpage.vm
index a228357..ba299db 100644
--- a/src/java/azkaban/webapp/servlet/velocity/jmxpage.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/jmxpage.vm
@@ -89,7 +89,7 @@
 				</tbody>
 			</table>
 
-#foreach($executor in $remoteMBeans.entrySet())
+#foreach($executor in $executorRemoteMBeans.entrySet())
 			<h3 class="subhead">Remote Executor JMX $executor.key</h3>
 			<table class="all-jobs job-table remoteJMX">
 				<thead>
@@ -131,6 +131,50 @@
 				</tbody>
 			</table>
 #end
+
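+## MBeans reported by the trigger server, exposed to this page as $triggerserverRemoteMBeans.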
+#foreach($triggerserver in $triggerserverRemoteMBeans.entrySet())
+			<h3 class="subhead">Remote Trigger Server JMX $triggerserver.key</h3>
+			<table class="all-jobs job-table remoteJMX">
+				<thead>
+					<tr>
+						<th>Name</th>
+						<th>Domain</th>
+						<th>Canonical Name</th>
+						<th></th>
+					</tr>
+				</thead>
+				<tbody>
+					#foreach($bean in $triggerserver.value)
+						<tr>
+							<td>${bean.get("keyPropertyList").get("name")}</td>
+							<td>${bean.get("domain")}</td>
+							<td>${bean.get("canonicalName")}</td>
+							<td><div class="btn4 querybtn" id="expandBtn-$counter" domain="${bean.get("domain")}" name="${bean.get("keyPropertyList").get("name")}" hostport="$triggerserver.key">Query</div></td>
+						</tr>
+					<tr class="childrow" id="expandBtn-${counter}-child"  style="display: none;">
+						<td class="expandedFlow" colspan="3">
+							<table class="innerTable">
+								<thead>
+									<tr>
+										<th>Attribute Name</th>
+										<th>Value</th>
+									</tr>
+								</thead>
+								<tbody id="expandBtn-${counter}-tbody">
+								</tbody>
+							</table>
+						</td>
+
+						<td>
+							<div class="btn4 collapse">Collapse</div>
+						</td>
+				</tr>
+				#set($counter=$counter + 1)
+					#end 
+				</tbody>
+			</table>
+#end
+
 		</div>
 	</body>
 </html>
diff --git a/src/java/azkaban/webapp/servlet/velocity/nav.vm b/src/java/azkaban/webapp/servlet/velocity/nav.vm
index 0ac1eb7..c2fef78 100644
--- a/src/java/azkaban/webapp/servlet/velocity/nav.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/nav.vm
@@ -25,6 +25,9 @@
 			<ul id="nav" class="nav">
 				<li id="all-jobs-tab" #if($current_page == 'all')class="selected"#end onClick="navMenuClick('$!context/index')"><a href="$!context/index">Projects</a></li>
 				<li id="scheduled-jobs-tab" #if($current_page == 'schedule')class="selected"#end onClick="navMenuClick('$!context/schedule')"><a href="$!context/schedule">Scheduled</a></li>
+
+				<!--li id="triggers-tab" #if($current_page == 'triggers')class="selected"#end onClick="navMenuClick('$!context/triggers')"><a href="$!context/triggers">Triggers</a></li-->
+
 				<li id="executing-jobs-tab" #if($current_page == 'executing')class="selected"#end onClick="navMenuClick('$!context/executor')"><a href="$!context/executor">Executing</a></li>
 				<li id="history-jobs-tab" #if($current_page == 'history')class="selected"#end onClick="navMenuClick('$!context/history')"><a href="$!context/history">History</a></li>
 				
@@ -35,6 +38,15 @@
 					</li>
 					#end
 				#end
+				
+				#foreach($triggerPlugin in $triggerPlugins)
+					#if(!$triggerPlugin.hidden)
+					<li #if($current_page == $triggerPlugin.pluginName) class="selected"#end onClick="navMenuClick('$!context/$triggerPlugin.pluginPath')">
+						<a href="$!context/$triggerPlugin.pluginPath">$triggerPlugin.pluginName</a>
+					</li>
+					#end
+				#end
+				
 			</ul>
 			
 			<div id="user-id">
diff --git a/src/java/azkaban/webapp/servlet/velocity/triggerspage.vm b/src/java/azkaban/webapp/servlet/velocity/triggerspage.vm
new file mode 100644
index 0000000..6ccb348
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/velocity/triggerspage.vm
@@ -0,0 +1,99 @@
+#*
+ * Copyright 2012 LinkedIn, Inc
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+*#
+
+<!DOCTYPE html> 
+<html>
+	<head>
+#parse( "azkaban/webapp/servlet/velocity/style.vm" )
+		<link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui-1.10.1.custom.css" />
+		<link rel="stylesheet" type="text/css" href="${context}/css/jquery-ui.css" />
+		
+		<script type="text/javascript" src="${context}/js/jquery/jquery-1.9.1.js"></script>    
+		<script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
+		<script type="text/javascript" src="${context}/js/underscore-1.4.4-min.js"></script>
+		<script type="text/javascript" src="${context}/js/namespace.js"></script>
+		<script type="text/javascript" src="${context}/js/backbone-0.9.10-min.js"></script>
+		<script type="text/javascript" src="${context}/js/jquery.simplemodal-1.4.4.js"></script>
+		
+		<script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-timepicker-addon.js"></script> 
+		<script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-sliderAccess.js"></script>
+		<script type="text/javascript" src="${context}/js/azkaban.table.sort.js"></script>
+		<script type="text/javascript" src="${context}/js/azkaban.nav.js"></script>
+		<script type="text/javascript" src="${context}/js/azkaban.triggers.view.js"></script>
+		<script type="text/javascript">
+			var contextURL = "${context}";
+			var currentTime = ${currentTime};
+			var timezone = "${timezone}";
+			var errorMessage = null;
+			var successMessage = null;
+		</script>
+	</head>
+	<body>
+#set($current_page="triggers")
+#parse( "azkaban/webapp/servlet/velocity/nav.vm" )
+		<div class="messaging"><p id="messageClose">X</p><p id="message"></p></div>  
+
+		<div class="content">
+		
+#if($errorMsg)
+		<div class="box-error-message">$errorMsg</div>
+#else
+#if($error_message != "null")
+		<div class="box-error-message">$error_message</div>
+#elseif($success_message != "null")
+		<div class="box-success-message">$success_message</div>
+#end
+#end		
+		
+			<div id="all-triggers-content">
+				<div class="section-hd">
+					<h2>All Triggers</h2>
+				</div>
+			</div>
+			
+			<div class="triggers">
+				<table id="triggersTbl">
+					<thead>
+						<tr>
+							<th>ID</th>
+							<th>Source</th>
+							<th>Submitted By</th>
+							<th>Description</th>
+							<th>Status</th>
+							<!--th colspan="2" class="action ignoresort">Action</th-->
+						</tr>
+					</thead>
+					<tbody>
+						#if($triggers)
+#foreach($trigger in $triggers)
+						<tr class="row" >
+
+							<td>${trigger.triggerId}</td>
+							<td>${trigger.source}</td>
+							<td>${trigger.submitUser}</td>
+							<td>${trigger.getDescription()}</td>
+							<td>${trigger.getStatus()}</td>
+							<!--td><button id="expireTriggerBtn" onclick="expireTrigger(${trigger.triggerId})" >Expire Trigger</button></td-->
+						</tr>
+#end
+#else
+						<tr><td class="last" colspan="5">No Triggers Found</td></tr>
+#end
+					</tbody>
+				</table>
+			</div>
+		</div>
+	</body>
+</html>
diff --git a/src/java/azkaban/webapp/session/SessionCache.java b/src/java/azkaban/webapp/session/SessionCache.java
index 65a743f..2d033d5 100644
--- a/src/java/azkaban/webapp/session/SessionCache.java
+++ b/src/java/azkaban/webapp/session/SessionCache.java
@@ -20,7 +20,6 @@ import azkaban.utils.Props;
 import azkaban.utils.cache.Cache;
 import azkaban.utils.cache.CacheManager;
 import azkaban.utils.cache.Cache.EjectionPolicy;
-import azkaban.utils.cache.Element;
 
 
 /**
diff --git a/src/package/execserver/bin/azkaban-executor-start.sh b/src/package/execserver/bin/azkaban-executor-start.sh
index 912eaa0..f6313f7 100755
--- a/src/package/execserver/bin/azkaban-executor-start.sh
+++ b/src/package/execserver/bin/azkaban-executor-start.sh
@@ -1,7 +1,7 @@
 azkaban_dir=$(dirname $0)/..
 
 if [[ -z "$tmpdir" ]]; then
-tmpdir=temp
+tmpdir=/tmp
 fi
 
 for file in $azkaban_dir/lib/*.jar;
@@ -19,6 +19,21 @@ do
   CLASSPATH=$CLASSPATH:$file
 done
 
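+# Add the Hadoop core jar, conf directory, and native library path so Hadoop-based
+# job types can run; warn when HADOOP_HOME is not set.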
+if [ "$HADOOP_HOME" != "" ]; then
+	for file in $HADOOP_HOME/hadoop-core*.jar ;
+	do
+		CLASSPATH=$CLASSPATH:$file
+	done
+	CLASSPATH=$CLASSPATH:$HADOOP_HOME/conf
+    JAVA_LIB_PATH="-Djava.library.path=$HADOOP_HOME/lib/native/Linux-amd64-64"
+else
+	echo "Error: HADOOP_HOME is not set. Hadoop job types will not run properly."
+fi
+
+if [ "$HIVE_HOME" != "" ]; then
+    CLASSPATH=$CLASSPATH:$HIVE_HOME/conf
+fi
+
 echo $azkaban_dir;
 echo $CLASSPATH;
 
@@ -27,11 +42,11 @@ echo "Starting AzkabanExecutorServer on port $executorport ..."
 serverpath=`pwd`
 
 if [ -z $AZKABAN_OPTS ]; then
-  AZKABAN_OPTS=-Xmx3G
+  AZKABAN_OPTS="-Xmx3G"
 fi
 AZKABAN_OPTS="$AZKABAN_OPTS -server -Dcom.sun.management.jmxremote -Djava.io.tmpdir=$tmpdir -Dexecutorport=$executorport -Dserverpath=$serverpath"
 
-java $AZKABAN_OPTS -cp $CLASSPATH azkaban.execapp.AzkabanExecutorServer -conf $azkaban_dir/conf $@ &
+java $AZKABAN_OPTS $JAVA_LIB_PATH -cp $CLASSPATH azkaban.execapp.AzkabanExecutorServer -conf $azkaban_dir/conf $@ &
 
 echo $! > currentpid
 
diff --git a/src/package/execserver/bin/start-exec.sh b/src/package/execserver/bin/start-exec.sh
new file mode 100644
index 0000000..f44b2ec
--- /dev/null
+++ b/src/package/execserver/bin/start-exec.sh
@@ -0,0 +1,4 @@
+base_dir=$(dirname $0)/..
+
+bin/azkaban-executor-start.sh $base_dir > logs/executorServerLog__`date +%F+%T`.out 2>&1 &
+
diff --git a/src/package/execserver/conf/azkaban.properties b/src/package/execserver/conf/azkaban.properties
index f42ea60..b54b0f5 100644
--- a/src/package/execserver/conf/azkaban.properties
+++ b/src/package/execserver/conf/azkaban.properties
@@ -1,6 +1,9 @@
 #Azkaban
 default.timezone.id=America/Los_Angeles
 
+# Azkaban JobTypes Plugins
+azkaban.jobtype.plugin.dir=plugins/jobtypes
+
 #Loader for projects
 executor.global.properties=conf/global.properties
 azkaban.project.dir=projects
@@ -16,4 +19,4 @@ mysql.numconnections=100
 # Azkaban Executor settings
 executor.maxThreads=50
 executor.port=12321
-executor.flow.threads=30
\ No newline at end of file
+executor.flow.threads=30
diff --git a/src/package/soloserver/conf/azkaban-users.xml b/src/package/soloserver/conf/azkaban-users.xml
index b30da41..e19acc8 100644
--- a/src/package/soloserver/conf/azkaban-users.xml
+++ b/src/package/soloserver/conf/azkaban-users.xml
@@ -1,5 +1,7 @@
 <azkaban-users>
 	<user username="azkaban" password="azkaban" roles="admin" groups="azkaban" />
+	<user username="metrics" password="metrics" roles="metrics"/>
 	
 	<role name="admin" permissions="ADMIN" />
+	<role name="metrics" permissions="METRICS"/>
 </azkaban-users>
diff --git a/src/package/triggerserver/bin/azkaban-trigger-shutdown.sh b/src/package/triggerserver/bin/azkaban-trigger-shutdown.sh
new file mode 100755
index 0000000..3dda364
--- /dev/null
+++ b/src/package/triggerserver/bin/azkaban-trigger-shutdown.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+azkaban_dir=$(dirname $0)/..
+
+triggerport=`cat $azkaban_dir/conf/azkaban.properties | grep trigger.port | cut -d = -f 2`
+echo "Shutting down current running AzkabanTriggerServer at port $triggerport"
+
+proc=`cat $azkaban_dir/currentpid`
+
+kill $proc
+
+cat /dev/null > $azkaban_dir/currentpid
diff --git a/src/package/triggerserver/bin/azkaban-trigger-start.sh b/src/package/triggerserver/bin/azkaban-trigger-start.sh
new file mode 100755
index 0000000..00e1077
--- /dev/null
+++ b/src/package/triggerserver/bin/azkaban-trigger-start.sh
@@ -0,0 +1,37 @@
+azkaban_dir=$(dirname $0)/..
+
+if [[ -z "$tmpdir" ]]; then
+tmpdir=temp
+fi
+
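+# Build the trigger server classpath from lib/, extlib/, and any installed plugin jars.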
+for file in $azkaban_dir/lib/*.jar;
+do
+  CLASSPATH=$CLASSPATH:$file
+done
+
+for file in $azkaban_dir/extlib/*.jar;
+do
+  CLASSPATH=$CLASSPATH:$file
+done
+
+for file in $azkaban_dir/plugins/*/*.jar;
+do
+  CLASSPATH=$CLASSPATH:$file
+done
+
+echo $azkaban_dir;
+echo $CLASSPATH;
+
+triggerport=`cat $azkaban_dir/conf/azkaban.properties | grep trigger.port | cut -d = -f 2`
+echo "Starting AzkabanTriggerServer on port $triggerport ..."
+serverpath=`pwd`
+
+if [ -z "$AZKABAN_OPTS" ]; then
+  AZKABAN_OPTS="-Xmx3G"
+fi
+AZKABAN_OPTS="$AZKABAN_OPTS -server -Dcom.sun.management.jmxremote -Djava.io.tmpdir=$tmpdir -Dtriggerport=$triggerport -Dserverpath=$serverpath"
+
+java $AZKABAN_OPTS -cp $CLASSPATH azkaban.triggerapp.AzkabanTriggerServer -conf $azkaban_dir/conf $@ &
+
+echo $! > currentpid
+
diff --git a/src/package/triggerserver/conf/azkaban.private.properties b/src/package/triggerserver/conf/azkaban.private.properties
new file mode 100644
index 0000000..cce1792
--- /dev/null
+++ b/src/package/triggerserver/conf/azkaban.private.properties
@@ -0,0 +1 @@
+# Optional properties that are hidden from the executions
\ No newline at end of file
diff --git a/src/package/triggerserver/conf/azkaban.properties b/src/package/triggerserver/conf/azkaban.properties
new file mode 100644
index 0000000..3504854
--- /dev/null
+++ b/src/package/triggerserver/conf/azkaban.properties
@@ -0,0 +1,18 @@
+#Azkaban
+default.timezone.id=America/Los_Angeles
+
+#Loader for projects
+azkaban.project.dir=projects
+
+database.type=mysql
+mysql.port=3306
+mysql.host=localhost
+mysql.database=azkaban2
+mysql.user=azkaban
+mysql.password=azkaban
+mysql.numconnections=100
+
+# Azkaban Executor settings
+trigger.server.maxThreads=50
+trigger.server.port=22321
+jetty.hostname=eat1-spadesaz01.grid.linkedin.com
diff --git a/src/package/triggerserver/conf/global.properties b/src/package/triggerserver/conf/global.properties
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/package/triggerserver/conf/global.properties
diff --git a/src/package/webserver/bin/azkaban-web-start.sh b/src/package/webserver/bin/azkaban-web-start.sh
index bdd19e0..fd16e47 100755
--- a/src/package/webserver/bin/azkaban-web-start.sh
+++ b/src/package/webserver/bin/azkaban-web-start.sh
@@ -1,7 +1,7 @@
 azkaban_dir=$(dirname $0)/..
 
 if [[ -z "$tmpdir" ]]; then
-tmpdir=temp
+tmpdir=/tmp
 fi
 
 for file in $azkaban_dir/lib/*.jar;
@@ -19,6 +19,21 @@ do
   CLASSPATH=$CLASSPATH:$file
 done
 
+if [ "$HADOOP_HOME" != "" ]; then
+    for file in $HADOOP_HOME/hadoop-core*.jar;
+    do
+        CLASSPATH=$CLASSPATH:$file
+    done
+    CLASSPATH=$CLASSPATH:$HADOOP_HOME/conf
+    JAVA_LIB_PATH="-Djava.library.path=$HADOOP_HOME/lib/native/Linux-amd64-64"
+else
+    echo "Error: HADOOP_HOME is not set. Hadoop job types will not run properly."
+fi
+
+if [ "HIVE_HOME" != "" ]; then
+    CLASSPATH=$CLASSPATH:$HIVE_HOME/conf
+fi
+
 echo $azkaban_dir;
 echo $CLASSPATH;
 
@@ -26,11 +41,11 @@ executorport=`cat $azkaban_dir/conf/azkaban.properties | grep executor.port | cu
 serverpath=`pwd`
 
 if [ -z $AZKABAN_OPTS ]; then
-  AZKABAN_OPTS=-Xmx3G
+  AZKABAN_OPTS="-Xmx4G"
 fi
-AZKABAN_OPTS=$AZKABAN_OPTS -server -Dcom.sun.management.jmxremote -Djava.io.tmpdir=$tmpdir -Dexecutorport=$executorport -Dserverpath=$serverpath
+AZKABAN_OPTS="$AZKABAN_OPTS -server -Dcom.sun.management.jmxremote -Djava.io.tmpdir=$tmpdir -Dexecutorport=$executorport -Dserverpath=$serverpath"
 
-java $AZKABAN_OPTS -cp $CLASSPATH azkaban.webapp.AzkabanWebServer -conf $azkaban_dir/conf $@ &
+java $AZKABAN_OPTS $JAVA_LIB_PATH -cp $CLASSPATH azkaban.webapp.AzkabanWebServer -conf $azkaban_dir/conf $@ &
 
 echo $! > currentpid
 
diff --git a/src/package/webserver/bin/start-web.sh b/src/package/webserver/bin/start-web.sh
new file mode 100644
index 0000000..30d43fa
--- /dev/null
+++ b/src/package/webserver/bin/start-web.sh
@@ -0,0 +1,3 @@
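+# Convenience wrapper: starts the web server and writes its output to a timestamped log under logs/.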
+base_dir=$(dirname $0)/..
+
+bin/azkaban-web-start.sh $base_dir >logs/webServerLog_`date +%F+%T`.out 2>&1 &
diff --git a/src/package/webserver/conf/azkaban-users.xml b/src/package/webserver/conf/azkaban-users.xml
index b30da41..e19acc8 100644
--- a/src/package/webserver/conf/azkaban-users.xml
+++ b/src/package/webserver/conf/azkaban-users.xml
@@ -1,5 +1,7 @@
 <azkaban-users>
 	<user username="azkaban" password="azkaban" roles="admin" groups="azkaban" />
+	<user username="metrics" password="metrics" roles="metrics"/>
 	
 	<role name="admin" permissions="ADMIN" />
+	<role name="metrics" permissions="METRICS"/>
 </azkaban-users>
diff --git a/src/sql/create.triggers.sql b/src/sql/create.triggers.sql
new file mode 100644
index 0000000..523ad5a
--- /dev/null
+++ b/src/sql/create.triggers.sql
@@ -0,0 +1,8 @@
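+-- One row per trigger; the trigger object is serialized into the data blob.
+-- enc_type presumably marks how the blob is encoded, mirroring Azkaban's other blob tables.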
+CREATE TABLE triggers (
+	trigger_id INT NOT NULL AUTO_INCREMENT,
+	trigger_source VARCHAR(128),
+	modify_time BIGINT NOT NULL,
+	enc_type TINYINT,
+	data LONGBLOB,
+	PRIMARY KEY (trigger_id)
+);
diff --git a/src/sql/update.execution_logs.2.1.sql b/src/sql/update.execution_logs.2.1.sql
index 5c2dc0b..1760a4b 100644
--- a/src/sql/update.execution_logs.2.1.sql
+++ b/src/sql/update.execution_logs.2.1.sql
@@ -2,6 +2,5 @@ ALTER TABLE execution_logs ADD COLUMN attempt INT DEFAULT 0;
 ALTER TABLE execution_logs ADD COLUMN upload_time BIGINT DEFAULT 1420099200000;
 UPDATE execution_logs SET upload_time=(UNIX_TIMESTAMP()*1000) WHERE upload_time=1420099200000;
 
-ALTER TABLE execution_logs DROP PRIMARY KEY;
 ALTER TABLE execution_logs ADD PRIMARY KEY(exec_id, name, attempt, start_byte);
-ALTER TABLE execution_logs ADD INDEX ex_log_attempt (exec_id, name, attempt)
+ALTER TABLE execution_logs ADD INDEX ex_log_attempt (exec_id, name, attempt);
diff --git a/src/web/js/azkaban.scheduled.view.js b/src/web/js/azkaban.scheduled.view.js
index 4fa92e4..b713783 100644
--- a/src/web/js/azkaban.scheduled.view.js
+++ b/src/web/js/azkaban.scheduled.view.js
@@ -122,10 +122,6 @@ azkaban.ChangeSlaView = Backbone.View.extend({
 							indexToText[i] = "job " + allJobNames[i-1];
 						}
 						
-						
-						
-						
-						
 						// populate with existing settings
 						if(data.settings) {
 							
diff --git a/src/web/js/azkaban.triggers.view.js b/src/web/js/azkaban.triggers.view.js
new file mode 100644
index 0000000..1bff2fd
--- /dev/null
+++ b/src/web/js/azkaban.triggers.view.js
@@ -0,0 +1,361 @@
+$.namespace('azkaban');
+
+function expireTrigger(triggerId) {
+	var triggerURL = contextURL + "/triggers"
+	var redirectURL = contextURL + "/triggers"
+	$.post(
+			triggerURL,
+			{"ajax":"expireTrigger", "triggerId":triggerId},
+			function(data) {
+				if (data.error) {
+//                 alert(data.error)
+					$('#errorMsg').text(data.error);
+				}
+				else {
+// 		 alert("Schedule "+schedId+" removed!")
+					window.location = redirectURL;
+				}
+			},
+			"json"
+	)
+}
+
+function removeSched(scheduleId) {
+	var scheduleURL = contextURL + "/schedule"
+	var redirectURL = contextURL + "/schedule"
+	$.post(
+			scheduleURL,
+			{"action":"removeSched", "scheduleId":scheduleId},
+			function(data) {
+				if (data.error) {
+//                 alert(data.error)
+					$('#errorMsg').text(data.error);
+				}
+				else {
+// 		 alert("Schedule "+schedId+" removed!")
+					window.location = redirectURL;
+				}
+			},
+			"json"
+	)
+}
+
+function removeSla(scheduleId) {
+	var scheduleURL = contextURL + "/schedule"
+	var redirectURL = contextURL + "/schedule"
+	$.post(
+			scheduleURL,
+			{"action":"removeSla", "scheduleId":scheduleId},
+			function(data) {
+				if (data.error) {
+//                 alert(data.error)
+					$('#errorMsg').text(data.error)
+				}
+				else {
+// 		 alert("Schedule "+schedId+" removed!")
+					window.location = redirectURL
+				}
+			},
+			"json"
+	)
+}
+
+azkaban.ChangeSlaView = Backbone.View.extend({
+	events : {
+		"click" : "closeEditingTarget",
+		"click #set-sla-btn": "handleSetSla",	
+		"click #remove-sla-btn": "handleRemoveSla",
+		"click #sla-cancel-btn": "handleSlaCancel",
+		"click .modal-close": "handleSlaCancel",
+		"click #addRow": "handleAddRow"
+	},
+	initialize: function(setting) {
+
+	},
+	handleSlaCancel: function(evt) {
+		console.log("Clicked cancel button");
+		var scheduleURL = contextURL + "/schedule";
+
+		$('#slaModalBackground').hide();
+		$('#sla-options').hide();
+		
+		var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+		var rows = tFlowRules.rows;
+		var rowLength = rows.length
+		for(var i = 0; i < rowLength-1; i++) {
+			tFlowRules.deleteRow(0);
+		}
+		
+	},
+	initFromSched: function(scheduleId, flowName) {
+		this.scheduleId = scheduleId;
+		
+		var scheduleURL = contextURL + "/schedule"
+		this.scheduleURL = scheduleURL;
+		var indexToName = {};
+		var nameToIndex = {};
+		var indexToText = {};
+		this.indexToName = indexToName;
+		this.nameToIndex = nameToIndex;
+		this.indexToText = indexToText;
+		var ruleBoxOptions = ["SUCCESS", "FINISH"];
+		this.ruleBoxOptions = ruleBoxOptions;
+		
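+		// The "slaInfo" ajax call returns slaEmails, allJobNames and any existing SLA settings for this schedule.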
+		var fetchScheduleData = {"scheduleId": this.scheduleId, "ajax":"slaInfo"};
+		
+		$.get(
+				this.scheduleURL,
+				fetchScheduleData,
+				function(data) {
+					if (data.error) {
+						alert(data.error);
+					}
+					else {
+						if (data.slaEmails) {
+							$('#slaEmails').val(data.slaEmails.join());
+						}
+						
+						var allJobNames = data.allJobNames;
+						
+						indexToName[0] = "";
+						nameToIndex[flowName] = 0;
+						indexToText[0] = "flow " + flowName;
+						for(var i = 1; i <= allJobNames.length; i++) {
+							indexToName[i] = allJobNames[i-1];
+							nameToIndex[allJobNames[i-1]] = i;
+							indexToText[i] = "job " + allJobNames[i-1];
+						}
+						
+						// populate with existing settings
+						if(data.settings) {
+							
+							var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+							
+							for(var setting in data.settings) {
+								var rFlowRule = tFlowRules.insertRow(0);
+								
+								var cId = rFlowRule.insertCell(-1);
+								var idSelect = document.createElement("select");
+								for(var i in indexToName) {
+									idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
+									if(data.settings[setting].id == indexToName[i]) {
+										idSelect.options[i].selected = true;
+									}
+								}								
+								cId.appendChild(idSelect);
+								
+								var cRule = rFlowRule.insertCell(-1);
+								var ruleSelect = document.createElement("select");
+								for(var i in ruleBoxOptions) {
+									ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
+									if(data.settings[setting].rule == ruleBoxOptions[i]) {
+										ruleSelect.options[i].selected = true;
+									}
+								}
+								cRule.appendChild(ruleSelect);
+								
+								var cDuration = rFlowRule.insertCell(-1);
+								var duration = document.createElement("input");
+								duration.type = "text";
+								duration.setAttribute("class", "durationpick");
+								var rawMinutes = data.settings[setting].duration;
+								var intMinutes = rawMinutes.substring(0, rawMinutes.length-1);
+								var minutes = parseInt(intMinutes);
+								var hours = Math.floor(minutes / 60);
+								minutes = minutes % 60;
+								duration.value = hours + ":" + minutes;
+								cDuration.appendChild(duration);
+
+								var cEmail = rFlowRule.insertCell(-1);
+								var emailCheck = document.createElement("input");
+								emailCheck.type = "checkbox";
+								for(var act in data.settings[setting].actions) {
+									if(data.settings[setting].actions[act] == "EMAIL") {
+										emailCheck.checked = true;
+									}
+								}
+								cEmail.appendChild(emailCheck);
+								
+								var cKill = rFlowRule.insertCell(-1);
+								var killCheck = document.createElement("input");
+								killCheck.type = "checkbox";
+								for(var act in data.settings[setting].actions) {
+									if(data.settings[setting].actions[act] == "KILL") {
+										killCheck.checked = true;
+									}
+								}
+								cKill.appendChild(killCheck);
+								
+								$('.durationpick').timepicker({hourMax: 99});
+							}
+						}
+						$('.durationpick').timepicker({hourMax: 99});
+					}
+				},
+				"json"
+			);
+		
+		$('#slaModalBackground').show();
+		$('#sla-options').show();
+		
+//		this.schedFlowOptions = sched.flowOptions
+		console.log("Loaded schedule info. Ready to set SLA.");
+
+	},
+	handleRemoveSla: function(evt) {
+		console.log("Clicked remove sla button");
+		var scheduleURL = this.scheduleURL;
+		var redirectURL = this.scheduleURL;
+		$.post(
+				scheduleURL,
+				{"action":"removeSla", "scheduleId":this.scheduleId},
+				function(data) {
+					if (data.error) {
+						$('#errorMsg').text(data.error);
+					}
+					else {
+						window.location = redirectURL;
+					}
+				},
+				"json"
+			);
+
+	},
+	handleSetSla: function(evt) {
+
+		var slaEmails = $('#slaEmails').val();
+		var settings = {};
+		
+		
+		var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
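+		// Each rule row below is flattened into an "id,rule,duration,emailAction,killAction" string for the setSla call.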
+		for(var row = 0; row < tFlowRules.rows.length-1; row++) {
+			var rFlowRule = tFlowRules.rows[row];
+			var id = rFlowRule.cells[0].firstChild.value;
+			var rule = rFlowRule.cells[1].firstChild.value;
+			var duration = rFlowRule.cells[2].firstChild.value;
+			var email = rFlowRule.cells[3].firstChild.checked;
+			var kill = rFlowRule.cells[4].firstChild.checked;
+			settings[row] = id + "," + rule + "," + duration + "," + email + "," + kill; 
+		}
+
+		var slaData = {
+			scheduleId: this.scheduleId,
+			ajax: "setSla",			
+			slaEmails: slaEmails,
+			settings: settings
+		};
+
+		var scheduleURL = this.scheduleURL;
+		
+		$.post(
+			scheduleURL,
+			slaData,
+			function(data) {
+				if (data.error) {
+					alert(data.error);
+				}
+				else {
+					tFlowRules.length = 0;
+					window.location = scheduleURL;
+				}
+			},
+			"json"
+		);
+	},
+	handleAddRow: function(evt) {
+		
+		var indexToName = this.indexToName;
+		var nameToIndex = this.nameToIndex;
+		var indexToText = this.indexToText;
+		var ruleBoxOptions = this.ruleBoxOptions;
+
+		var tFlowRules = document.getElementById("flowRulesTbl").tBodies[0];
+		var rFlowRule = tFlowRules.insertRow(tFlowRules.rows.length-1);
+		
+		var cId = rFlowRule.insertCell(-1);
+		var idSelect = document.createElement("select");
+		for(var i in indexToName) {
+			idSelect.options[i] = new Option(indexToText[i], indexToName[i]);
+		}
+		
+		cId.appendChild(idSelect);
+		
+		var cRule = rFlowRule.insertCell(-1);
+		var ruleSelect = document.createElement("select");
+		for(var i in ruleBoxOptions) {
+			ruleSelect.options[i] = new Option(ruleBoxOptions[i], ruleBoxOptions[i]);
+		}
+		cRule.appendChild(ruleSelect);
+		
+		var cDuration = rFlowRule.insertCell(-1);
+		var duration = document.createElement("input");
+		duration.type = "text";
+		duration.setAttribute("class", "durationpick");
+		cDuration.appendChild(duration);
+
+		var cEmail = rFlowRule.insertCell(-1);
+		var emailCheck = document.createElement("input");
+		emailCheck.type = "checkbox";
+		cEmail.appendChild(emailCheck);
+		
+		var cKill = rFlowRule.insertCell(-1);
+		var killCheck = document.createElement("input");
+		killCheck.type = "checkbox";
+		cKill.appendChild(killCheck);
+		
+		$('.durationpick').timepicker({hourMax: 99});
+
+		return rFlowRule;
+	},
+	handleEditColumn : function(evt) {
+		var curTarget = evt.currentTarget;
+	
+		if (this.editingTarget != curTarget) {
+			this.closeEditingTarget();
+			
+			var text = $(curTarget).children(".spanValue").text();
+			$(curTarget).empty();
+						
+			var input = document.createElement("input");
+			$(input).attr("type", "text");
+			$(input).css("width", "100%");
+			$(input).val(text);
+			$(curTarget).addClass("editing");
+			$(curTarget).append(input);
+			$(input).focus();
+			this.editingTarget = curTarget;
+		}
+	},
+	handleRemoveColumn : function(evt) {
+		var curTarget = evt.currentTarget;
+		// Should be the table
+		var row = curTarget.parentElement.parentElement;
+		$(row).remove();
+	},
+	closeEditingTarget: function(evt) {
+
+	}
+});
+
+var slaView;
+var tableSorterView;
+$(function() {
+	var selected;
+
+
+	slaView = new azkaban.ChangeSlaView({el:$('#sla-options')});
+	tableSorterView = new azkaban.TableSorter({el:$('#scheduledFlowsTbl')});
+//	var requestURL = contextURL + "/manager";
+
+	// Set up the Flow options view. Create a new one every time :p
+//	 $('#addSlaBtn').click( function() {
+//		 slaView.show();
+//	 });
+
+	 
+	
+});
\ No newline at end of file
diff --git a/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java b/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java
new file mode 100644
index 0000000..dc9b970
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/BasicTimeCheckerTest.java
@@ -0,0 +1,63 @@
+package azkaban.test.trigger;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.joda.time.ReadablePeriod;
+import org.junit.Test;
+
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.utils.Utils;
+
+public class BasicTimeCheckerTest {
+
+	@Test
+	public void basicTimerTest(){
+		
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+		
+		// get a new time checker that starts now and repeats every 10 seconds. It should evaluate to false now and to true 10 seconds later.
+		DateTime now = DateTime.now();
+		ReadablePeriod period = Utils.parsePeriodString("10s");
+		
+		BasicTimeChecker timeChecker = new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(), now.getZone(), true, true, period);
+		checkers.put(timeChecker.getId(), timeChecker);
+		String expr = timeChecker.getId() + ".eval()";
+		
+		Condition cond = new Condition(checkers, expr);
+		System.out.println(expr);
+		
+		assertFalse(cond.isMet());
+		
+		// sleep for 10 seconds
+		try {
+			Thread.sleep(10000);
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		assertTrue(cond.isMet());
+		
+		cond.resetCheckers();
+		
+		assertFalse(cond.isMet());
+		
+		// sleep for 10 seconds
+		try {
+			Thread.sleep(10000);
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		assertTrue(cond.isMet());
+		
+	}
+}
diff --git a/unit/java/azkaban/test/trigger/ConditionTest.java b/unit/java/azkaban/test/trigger/ConditionTest.java
new file mode 100644
index 0000000..c56edab
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/ConditionTest.java
@@ -0,0 +1,92 @@
+package azkaban.test.trigger;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+
+import org.joda.time.DateTime;
+import org.junit.Test;
+
+import azkaban.trigger.CheckerTypeLoader;
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.TriggerException;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
+
+public class ConditionTest {
+	
+	@Test
+	public void conditionTest(){
+		
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+
+		ThresholdChecker fake1 = new ThresholdChecker("thresholdchecker1", 10);
+		ThresholdChecker fake2 = new ThresholdChecker("thresholdchecker2", 20);
+		ThresholdChecker.setVal(15);
+		checkers.put(fake1.getId(), fake1);
+		checkers.put(fake2.getId(), fake2);
+
+		String expr1 = "( " + fake1.getId()+ ".eval()" + " && " + fake2.getId()+ ".eval()" + " )" + " || " + "( " + fake1.getId()+".eval()" + " && " + "!" + fake2.getId()+".eval()" + " )";
+		String expr2 = "( " + fake1.getId()+ ".eval()" + " && " + fake2.getId()+ ".eval()" + " )" + " || " + "( " + fake1.getId()+".eval()" + " && " + fake2.getId()+".eval()" + " )";
+
+		Condition cond = new Condition(checkers, expr1);
+
+		System.out.println("Setting expression " + expr1);
+		assertTrue(cond.isMet());
+		cond.setExpression(expr2);
+		System.out.println("Setting expression " + expr2);
+		assertFalse(cond.isMet());
+		
+	}
+	
+	@Test
+	public void jsonConversionTest() throws Exception {
+		
+		CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
+		checkerTypeLoader.init(new Props());
+		Condition.setCheckerLoader(checkerTypeLoader);
+
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+		
+		// get a new time checker that starts now and repeats every 6 seconds; this test only exercises the JSON round trip.
+		DateTime now = DateTime.now();
+		String period = "6s";
+		
+		//BasicTimeChecker timeChecker = new BasicTimeChecker(now, true, true, period);
+		ConditionChecker timeChecker = new BasicTimeChecker("BasicTimeChecker_1", now.getMillis(), now.getZone(), true, true, Utils.parsePeriodString(period));
+		System.out.println("checker id is " + timeChecker.getId());
+		
+		checkers.put(timeChecker.getId(), timeChecker);
+		String expr = timeChecker.getId() + ".eval()";
+		
+		Condition cond = new Condition(checkers, expr);
+		
+		File temp = File.createTempFile("temptest", "temptest");
+		temp.deleteOnExit();
+		Object obj = cond.toJson();
+		JSONUtils.toJSON(obj, temp);
+		
+		Condition cond2 = Condition.fromJson(JSONUtils.parseJSONFromFile(temp));
+		
+		Map<String, ConditionChecker> checkers1 = cond.getCheckers();
+		Map<String, ConditionChecker> checkers2 = cond2.getCheckers();
+		
+		assertTrue(cond.getExpression().equals(cond2.getExpression()));
+		System.out.println("cond1: " + cond.getExpression());
+		System.out.println("cond2: " + cond2.getExpression());
+		assertTrue(checkers2.size() == 1);
+		ConditionChecker checker2 = checkers2.get(timeChecker.getId());
+		//assertTrue(checker2.getId().equals(timeChecker.getId()));
+		System.out.println("checker1: " + timeChecker.getId());
+		System.out.println("checker2: " + checker2.getId());
+		assertTrue(timeChecker.getId().equals(checker2.getId()));
+	}
+	
+}
diff --git a/unit/java/azkaban/test/trigger/DummyTriggerAction.java b/unit/java/azkaban/test/trigger/DummyTriggerAction.java
new file mode 100644
index 0000000..cffbed6
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/DummyTriggerAction.java
@@ -0,0 +1,56 @@
+package azkaban.test.trigger;
+
+import java.util.Map;
+
+import azkaban.trigger.TriggerAction;
+
+public class DummyTriggerAction implements TriggerAction{
+
+	public static final String type = "DummyAction";
+	
+	private String message;
+	
+	public DummyTriggerAction(String message) {
+		this.message = message;
+	}
+	
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@Override
+	public TriggerAction fromJson(Object obj) {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public Object toJson() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public void doAction() {
+		System.out.println(getType() + " invoked.");
+		System.out.println(message);
+	}
+
+	@Override
+	public String getDescription() {
+		return "this is real dummy action";
+	}
+
+	@Override
+	public String getId() {
+		return null;
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+		// TODO Auto-generated method stub
+		
+	}
+
+}
diff --git a/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java b/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java
new file mode 100644
index 0000000..4f1bb09
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/ExecuteFlowActionTest.java
@@ -0,0 +1,42 @@
+package azkaban.test.trigger;
+
+import static org.junit.Assert.*;
+
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Test;
+
+import azkaban.executor.ExecutionOptions;
+import azkaban.trigger.ActionTypeLoader;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.TriggerException;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+import azkaban.utils.Props;
+
+
+public class ExecuteFlowActionTest {
+	
+	@Test
+	public void jsonConversionTest() throws Exception {
+		ActionTypeLoader loader = new ActionTypeLoader();
+		loader.init(new Props());
+		
+		ExecutionOptions options = new ExecutionOptions();
+		List<String> disabledJobs = new ArrayList<String>();
+		options.setDisabledJobs(disabledJobs);
+		
+		ExecuteFlowAction executeFlowAction = new ExecuteFlowAction("ExecuteFlowAction", 1, "testproject", "testflow", "azkaban", options, null);
+		
+		Object obj = executeFlowAction.toJson();
+		
+		ExecuteFlowAction action = (ExecuteFlowAction) loader.createActionFromJson(ExecuteFlowAction.type, obj);
+		assertTrue(executeFlowAction.getProjectId() == action.getProjectId());
+		assertTrue(executeFlowAction.getFlowName().equals(action.getFlowName()));
+		assertTrue(executeFlowAction.getSubmitUser().equals(action.getSubmitUser()));
+	}
+
+	
+	
+}
diff --git a/unit/java/azkaban/test/trigger/ThresholdChecker.java b/unit/java/azkaban/test/trigger/ThresholdChecker.java
new file mode 100644
index 0000000..4e6e85c
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/ThresholdChecker.java
@@ -0,0 +1,105 @@
+package azkaban.test.trigger;
+
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import azkaban.trigger.ConditionChecker;
+
+
+public class ThresholdChecker implements ConditionChecker{
+	
+	private int threshold = -1; 
+	
+	private static int curVal = -1;
+	
+	public static final String type = "ThresholdChecker";
+	
+	private String id;
+	
+	private boolean checkerMet = false;
+	private boolean checkerReset  = false;
+	
+	public ThresholdChecker(String id, int threshold){
+		this.id = id;
+		this.threshold = threshold;
+	}
+	
+	public synchronized static void setVal(int val) {
+		curVal = val;
+	}
+	
+	@Override
+	public Boolean eval() {
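+		// Latches: once curVal has exceeded the threshold, eval() keeps returning true until reset() is called.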
+		if(curVal > threshold) {
+			checkerMet = true;
+		}
+		return checkerMet;
+	}
+	
+	public boolean isCheckerMet() {
+		return checkerMet;
+	}
+
+	@Override
+	public void reset() {
+		checkerMet = false;
+		checkerReset = true;
+	}
+	
+	public boolean isCheckerReset() {
+		return checkerReset;
+	}
+	
+	@Override
+	public String getId() {
+		return id;
+	}
+
+	@Override
+	public String getType() {
+		return type;
+	}
+
+	@Override
+	public ConditionChecker fromJson(Object obj) {
+		return null;
+	}
+
+	@Override
+	public Object getNum() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public Object toJson() {
+		// TODO Auto-generated method stub
+		return null;
+	}
+
+	@Override
+	public void stopChecker() {
+		return;
+		
+	}
+
+	@Override
+	public void setContext(Map<String, Object> context) {
+		// TODO Auto-generated method stub
+		
+	}
+
+	@Override
+	public long getNextCheckTime() {
+		// TODO Auto-generated method stub
+		return 0;
+	}
+
+
+}
diff --git a/unit/java/azkaban/test/trigger/TriggerManagerTest.java b/unit/java/azkaban/test/trigger/TriggerManagerTest.java
new file mode 100644
index 0000000..fbd5da7
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/TriggerManagerTest.java
@@ -0,0 +1,192 @@
+package azkaban.test.trigger;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.CheckerTypeLoader;
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.ActionTypeLoader;
+import azkaban.trigger.TriggerException;
+import azkaban.trigger.TriggerLoader;
+import azkaban.trigger.TriggerLoaderException;
+import azkaban.trigger.TriggerManager;
+import azkaban.trigger.TriggerManagerException;
+import azkaban.utils.Props;
+
+public class TriggerManagerTest {
+	
+	private TriggerLoader triggerLoader;
+	
+	@Before
+	public void setup() throws TriggerException, TriggerManagerException {
+		triggerLoader = new MockTriggerLoader();
+		
+		
+	}
+	
+	@After
+	public void tearDown() {
+		
+	}
+	
+	@Test
+	public void TriggerManagerSimpleTest() throws TriggerManagerException {
+
+		
+		Props props = new Props();
+		props.put("trigger.scan.interval", 4000);
+		TriggerManager triggerManager = new TriggerManager(props, triggerLoader);
+		
+		triggerManager.registerCheckerType(ThresholdChecker.type, ThresholdChecker.class);
+		triggerManager.registerActionType(DummyTriggerAction.type, DummyTriggerAction.class);
+		
+		ThresholdChecker.setVal(1);
+		
+		triggerManager.insertTrigger(createDummyTrigger("test1", "triggerLoader", 10), "testUser");
+		List<Trigger> triggers = triggerManager.getTriggers();
+		assertTrue(triggers.size() == 1);
+		Trigger t1 = triggers.get(0);
+		t1.setResetOnTrigger(false);
+		triggerManager.updateTrigger(t1, "testUser");
+		ThresholdChecker checker1 = (ThresholdChecker) t1.getTriggerCondition().getCheckers().values().toArray()[0];
+		assertTrue(t1.getSource().equals("triggerLoader"));
+		
+		Trigger t2 = createDummyTrigger("test2: add new trigger", "addNewTriggerTest", 20);
+		triggerManager.insertTrigger(t2, "testUser");
+		ThresholdChecker checker2 = (ThresholdChecker) t2.getTriggerCondition().getCheckers().values().toArray()[0];
+		
+		ThresholdChecker.setVal(15);
+		try {
+			Thread.sleep(2000);
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		assertTrue(checker1.isCheckerMet() == false);
+		assertTrue(checker2.isCheckerMet() == false);
+		assertTrue(checker1.isCheckerReset() == false);
+		assertTrue(checker2.isCheckerReset() == false);
+		
+		try {
+			Thread.sleep(2000);
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		assertTrue(checker1.isCheckerMet() == true);
+		assertTrue(checker2.isCheckerMet() == false);
+		assertTrue(checker1.isCheckerReset() == false);
+		assertTrue(checker2.isCheckerReset() == false);
+		
+		ThresholdChecker.setVal(25);
+		try {
+			Thread.sleep(4000);
+		} catch (InterruptedException e) {
+			// TODO Auto-generated catch block
+			e.printStackTrace();
+		}
+		
+		assertTrue(checker1.isCheckerMet() == true);
+		assertTrue(checker1.isCheckerReset() == false);
+		assertTrue(checker2.isCheckerReset() == true);
+		
+		triggers = triggerManager.getTriggers();
+		assertTrue(triggers.size() == 1);
+		
+	}
+	
+	public class MockTriggerLoader implements TriggerLoader {
+
+		private Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
+		private int idIndex = 0;
+		
+		@Override
+		public void addTrigger(Trigger t) throws TriggerLoaderException {
+			t.setTriggerId(idIndex++);
+			triggers.put(t.getTriggerId(), t);
+		}
+
+		@Override
+		public void removeTrigger(Trigger s) throws TriggerLoaderException {
+			triggers.remove(s.getTriggerId());
+			
+		}
+
+		@Override
+		public void updateTrigger(Trigger t) throws TriggerLoaderException {
+			triggers.put(t.getTriggerId(), t);
+		}
+
+		@Override
+		public List<Trigger> loadTriggers() {
+			return new ArrayList<Trigger>(triggers.values());
+		}
+
+		@Override
+		public Trigger loadTrigger(int triggerId)
+				throws TriggerLoaderException {
+			// TODO Auto-generated method stub
+			return null;
+		}
+
+		@Override
+		public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+				throws TriggerLoaderException {
+			// TODO Auto-generated method stub
+			return null;
+		}
+		
+	}
+	
+	private Trigger createDummyTrigger(String message, String source, int threshold) {
+		
+		Map<String, ConditionChecker> checkers = new HashMap<String, ConditionChecker>();
+		ConditionChecker checker = new ThresholdChecker(ThresholdChecker.type, threshold);
+		checkers.put(checker.getId(), checker);
+		
+		List<TriggerAction> actions = new ArrayList<TriggerAction>();
+		TriggerAction act  = new DummyTriggerAction(message);
+		actions.add(act);
+		
+		String expr = checker.getId() + ".eval()";
+		
+		Condition triggerCond = new Condition(checkers, expr);
+		Condition expireCond = new Condition(checkers, expr);
+		
+		Trigger fakeTrigger = new Trigger(DateTime.now().getMillis(), DateTime.now().getMillis(), "azkaban", source, triggerCond, expireCond, actions);
+		fakeTrigger.setResetOnTrigger(true);
+		fakeTrigger.setResetOnExpire(true);
+		
+		return fakeTrigger;
+	}
+
+//	public class MockCheckerLoader extends CheckerTypeLoader{
+//		
+//		@Override
+//		public void init(Props props) {
+//			checkerToClass.put(ThresholdChecker.type, ThresholdChecker.class);
+//		}
+//	}
+//	
+//	public class MockActionLoader extends ActionTypeLoader {
+//		@Override
+//		public void init(Props props) {
+//			actionToClass.put(DummyTriggerAction.type, DummyTriggerAction.class);
+//		}
+//	}
+
+}
diff --git a/unit/java/azkaban/test/trigger/TriggerTest.java b/unit/java/azkaban/test/trigger/TriggerTest.java
new file mode 100644
index 0000000..22f6532
--- /dev/null
+++ b/unit/java/azkaban/test/trigger/TriggerTest.java
@@ -0,0 +1,71 @@
+package azkaban.test.trigger;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.joda.time.DateTime;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+import azkaban.executor.ExecutionOptions;
+import azkaban.trigger.ActionTypeLoader;
+import azkaban.trigger.CheckerTypeLoader;
+import azkaban.trigger.Condition;
+import azkaban.trigger.ConditionChecker;
+import azkaban.trigger.Trigger;
+import azkaban.trigger.TriggerAction;
+import azkaban.trigger.TriggerException;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
+
+public class TriggerTest {
+	
+	private CheckerTypeLoader checkerLoader;
+	private ActionTypeLoader actionLoader;
+	
+	@Before
+	public void setup() throws TriggerException {
+		checkerLoader = new CheckerTypeLoader();
+		checkerLoader.init(new Props());
+		Condition.setCheckerLoader(checkerLoader);
+		actionLoader = new ActionTypeLoader();
+		actionLoader.init(new Props());
+		Trigger.setActionTypeLoader(actionLoader);
+	}
+	
+	@Test
+	public void jsonConversionTest() throws Exception {
+		DateTime now = DateTime.now();
+		ConditionChecker checker1 = new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(), true, true, Utils.parsePeriodString("1h"));
+		Map<String, ConditionChecker> checkers1 = new HashMap<String, ConditionChecker>();
+		checkers1.put(checker1.getId(), checker1);
+		String expr1 = checker1.getId() + ".eval()";
+		Condition triggerCond = new Condition(checkers1, expr1);
+		Condition expireCond = new Condition(checkers1, expr1);
+		List<TriggerAction> actions = new ArrayList<TriggerAction>();
+		TriggerAction action = new ExecuteFlowAction("executeAction", 1, "testProj", "testFlow", "azkaban", new ExecutionOptions(), null);
+		actions.add(action);
+		Trigger t = new Trigger(now.getMillis(), now.getMillis(), "azkaban", "test", triggerCond, expireCond, actions);
+		
+		File temp = File.createTempFile("temptest", "temptest");
+		temp.deleteOnExit();
+		Object obj = t.toJson();
+		JSONUtils.toJSON(obj, temp);
+		
+		Trigger t2 = Trigger.fromJson(JSONUtils.parseJSONFromFile(temp));
+		
+		assertTrue(t.getSource().equals(t2.getSource()));
+		assertTrue(t.getTriggerId() == t2.getTriggerId());
+		
+	}
+
+}