azkaban-aplcache

reformat everything excluding files under development (#1137)

6/1/2017 3:52:46 PM

Changes

build.gradle 144 (+72 -72)

Details

diff --git a/azkaban-common/build.gradle b/azkaban-common/build.gradle
index 3bef370..eabcff2 100644
--- a/azkaban-common/build.gradle
+++ b/azkaban-common/build.gradle
@@ -17,63 +17,63 @@
 
 apply plugin: 'c'
 model {
-  components {
-    main(NativeExecutableSpec) {
-      sources {
-        c {
-          source {
-            srcDir "src/main"
-            include "**/*.c"
-          }
+    components {
+        main(NativeExecutableSpec) {
+            sources {
+                c {
+                    source {
+                        srcDir "src/main"
+                        include "**/*.c"
+                    }
+                }
+            }
         }
-      }
     }
-  }
 }
 
 dependencies {
-  compile project(':azkaban-spi')
-  compile project(':azkaban-db')
+    compile project(':azkaban-spi')
+    compile project(':azkaban-db')
 
-  compile "org.apache.hadoop:hadoop-auth:$hadoopVersion"
-  compile "org.apache.hadoop:hadoop-annotations:$hadoopVersion"
-  compile "org.apache.hadoop:hadoop-common:$hadoopVersion"
-  compile "org.apache.hadoop:hadoop-hdfs:$hadoopVersion"
-  compile('com.google.inject:guice:4.1.0')
-  compile('com.google.guava:guava:21.0')
-  compile('commons-collections:commons-collections:3.2.2')
-  compile('org.apache.commons:commons-dbcp2:2.1.1')
-  compile('commons-dbutils:commons-dbutils:1.5')
-  compile('commons-fileupload:commons-fileupload:1.2.1')
-  compile('commons-io:commons-io:2.4')
-  compile('commons-lang:commons-lang:2.6')
-  compile('javax.mail:mail:1.4.5')
-  compile('joda-time:joda-time:2.0')
-  compile('log4j:log4j:1.2.16')
-  compile('mysql:mysql-connector-java:5.1.28')
-  compile('net.sf.jopt-simple:jopt-simple:4.3')
-  compile('org.apache.commons:commons-jexl:2.1.1')
-  compile('org.apache.commons:commons-math3:3.0')
-  compile('org.apache.httpcomponents:httpclient:4.5.2')
-  compile('org.apache.httpcomponents:httpcore:4.4.5')
-  compile('org.apache.velocity:velocity:1.7')
-  compile('org.codehaus.jackson:jackson-core-asl:1.9.5')
-  compile('org.codehaus.jackson:jackson-mapper-asl:1.9.5')
-  compile('org.mortbay.jetty:jetty:6.1.26')
-  compile('org.mortbay.jetty:jetty-util:6.1.26')
-  compile('org.quartz-scheduler:quartz:2.2.1')
-  compile('org.yaml:snakeyaml:1.18')
-  compile('io.dropwizard.metrics:metrics-core:3.1.0')
-  compile('io.dropwizard.metrics:metrics-jvm:3.1.0')
+    compile "org.apache.hadoop:hadoop-auth:$hadoopVersion"
+    compile "org.apache.hadoop:hadoop-annotations:$hadoopVersion"
+    compile "org.apache.hadoop:hadoop-common:$hadoopVersion"
+    compile "org.apache.hadoop:hadoop-hdfs:$hadoopVersion"
+    compile('com.google.inject:guice:4.1.0')
+    compile('com.google.guava:guava:21.0')
+    compile('commons-collections:commons-collections:3.2.2')
+    compile('org.apache.commons:commons-dbcp2:2.1.1')
+    compile('commons-dbutils:commons-dbutils:1.5')
+    compile('commons-fileupload:commons-fileupload:1.2.1')
+    compile('commons-io:commons-io:2.4')
+    compile('commons-lang:commons-lang:2.6')
+    compile('javax.mail:mail:1.4.5')
+    compile('joda-time:joda-time:2.0')
+    compile('log4j:log4j:1.2.16')
+    compile('mysql:mysql-connector-java:5.1.28')
+    compile('net.sf.jopt-simple:jopt-simple:4.3')
+    compile('org.apache.commons:commons-jexl:2.1.1')
+    compile('org.apache.commons:commons-math3:3.0')
+    compile('org.apache.httpcomponents:httpclient:4.5.2')
+    compile('org.apache.httpcomponents:httpcore:4.4.5')
+    compile('org.apache.velocity:velocity:1.7')
+    compile('org.codehaus.jackson:jackson-core-asl:1.9.5')
+    compile('org.codehaus.jackson:jackson-mapper-asl:1.9.5')
+    compile('org.mortbay.jetty:jetty:6.1.26')
+    compile('org.mortbay.jetty:jetty-util:6.1.26')
+    compile('org.quartz-scheduler:quartz:2.2.1')
+    compile('org.yaml:snakeyaml:1.18')
+    compile('io.dropwizard.metrics:metrics-core:3.1.0')
+    compile('io.dropwizard.metrics:metrics-jvm:3.1.0')
 
-  testCompile('org.hamcrest:hamcrest-all:1.3')
-  testCompile('org.mockito:mockito-all:1.10.19')
-  testCompile(project(':azkaban-test').sourceSets.test.output)
+    testCompile('org.hamcrest:hamcrest-all:1.3')
+    testCompile('org.mockito:mockito-all:1.10.19')
+    testCompile(project(':azkaban-test').sourceSets.test.output)
 
-  testRuntime('com.h2database:h2:1.4.193')
-  testRuntime('org.slf4j:slf4j-log4j12:1.7.18')
+    testRuntime('com.h2database:h2:1.4.193')
+    testRuntime('org.slf4j:slf4j-log4j12:1.7.18')
 }
 
 tasks.withType(JavaCompile) {
-  options.encoding = "UTF-8"
+    options.encoding = "UTF-8"
 }
diff --git a/azkaban-common/src/main/java/azkaban/alert/Alerter.java b/azkaban-common/src/main/java/azkaban/alert/Alerter.java
index 196cb0c..07a6159 100644
--- a/azkaban-common/src/main/java/azkaban/alert/Alerter.java
+++ b/azkaban-common/src/main/java/azkaban/alert/Alerter.java
@@ -20,8 +20,10 @@ import azkaban.executor.ExecutableFlow;
 import azkaban.sla.SlaOption;
 
 public interface Alerter {
+
   void alertOnSuccess(ExecutableFlow exflow) throws Exception;
-  void alertOnError(ExecutableFlow exflow, String ... extraReasons) throws Exception;
+
+  void alertOnError(ExecutableFlow exflow, String... extraReasons) throws Exception;
 
   void alertOnFirstError(ExecutableFlow exflow) throws Exception;
 
diff --git a/azkaban-common/src/main/java/azkaban/AzkabanCommonModule.java b/azkaban-common/src/main/java/azkaban/AzkabanCommonModule.java
index ec91f67..9c14c2a 100644
--- a/azkaban-common/src/main/java/azkaban/AzkabanCommonModule.java
+++ b/azkaban-common/src/main/java/azkaban/AzkabanCommonModule.java
@@ -16,6 +16,10 @@
  */
 package azkaban;
 
+import static azkaban.Constants.ConfigurationKeys.HADOOP_CONF_DIR_PATH;
+import static com.google.common.base.Preconditions.checkArgument;
+import static java.util.Objects.requireNonNull;
+
 import azkaban.db.AzkabanDataSource;
 import azkaban.db.DatabaseOperator;
 import azkaban.db.DatabaseOperatorImpl;
@@ -50,23 +54,20 @@ import org.apache.hadoop.fs.FileSystem;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static azkaban.Constants.ConfigurationKeys.*;
-import static com.google.common.base.Preconditions.*;
-import static java.util.Objects.*;
-
 
 /**
- * This Guice module is currently a one place container for all bindings in the current module. This is intended to
- * help during the migration process to Guice. Once this class starts growing we can move towards more modular
- * structuring of Guice components.
+ * This Guice module is currently a one place container for all bindings in the current module. This
+ * is intended to help during the migration process to Guice. Once this class starts growing we can
+ * move towards more modular structuring of Guice components.
  */
 public class AzkabanCommonModule extends AbstractModule {
+
   private static final Logger log = LoggerFactory.getLogger(AzkabanCommonModule.class);
 
   private final Props props;
   private final AzkabanCommonModuleConfig config;
 
-  public AzkabanCommonModule(Props props) {
+  public AzkabanCommonModule(final Props props) {
     this.props = props;
     this.config = new AzkabanCommonModuleConfig(props);
   }
@@ -75,7 +76,7 @@ public class AzkabanCommonModule extends AbstractModule {
   protected void configure() {
     bind(ExecutorLoader.class).to(JdbcExecutorLoader.class).in(Scopes.SINGLETON);
     bind(ProjectLoader.class).to(JdbcProjectLoader.class).in(Scopes.SINGLETON);
-    bind(Props.class).toInstance(config.getProps());
+    bind(Props.class).toInstance(this.config.getProps());
     bind(Storage.class).to(resolveStorageClassType()).in(Scopes.SINGLETON);
     bind(DatabaseOperator.class).to(DatabaseOperatorImpl.class).in(Scopes.SINGLETON);
     bind(TriggerLoader.class).to(JdbcTriggerImpl.class).in(Scopes.SINGLETON);
@@ -85,19 +86,19 @@ public class AzkabanCommonModule extends AbstractModule {
   }
 
   public Class<? extends Storage> resolveStorageClassType() {
-    final StorageImplementationType type = StorageImplementationType.from(config.getStorageImplementation());
+    final StorageImplementationType type = StorageImplementationType
+        .from(this.config.getStorageImplementation());
     if (type != null) {
       return type.getImplementationClass();
     } else {
-      return loadCustomStorageClass(config.getStorageImplementation());
+      return loadCustomStorageClass(this.config.getStorageImplementation());
     }
   }
 
-  @SuppressWarnings("unchecked")
-  private Class<? extends Storage> loadCustomStorageClass(String storageImplementation) {
+  private Class<? extends Storage> loadCustomStorageClass(final String storageImplementation) {
     try {
       return (Class<? extends Storage>) Class.forName(storageImplementation);
-    } catch (ClassNotFoundException e) {
+    } catch (final ClassNotFoundException e) {
       throw new StorageException(e);
     }
   }
@@ -106,21 +107,21 @@ public class AzkabanCommonModule extends AbstractModule {
   @Inject
   @Provides
   @Singleton
-  public AzkabanDataSource getDataSource(Props props) {
-    String databaseType = props.getString("database.type");
+  public AzkabanDataSource getDataSource(final Props props) {
+    final String databaseType = props.getString("database.type");
 
-    if(databaseType.equals("h2")) {
-      String path = props.getString("h2.path");
-      Path h2DbPath = Paths.get(path).toAbsolutePath();
+    if (databaseType.equals("h2")) {
+      final String path = props.getString("h2.path");
+      final Path h2DbPath = Paths.get(path).toAbsolutePath();
       log.info("h2 DB path: " + h2DbPath);
       return new H2FileDataSource(h2DbPath);
     }
-    int port = props.getInt("mysql.port");
-    String host = props.getString("mysql.host");
-    String database = props.getString("mysql.database");
-    String user = props.getString("mysql.user");
-    String password = props.getString("mysql.password");
-    int numConnections = props.getInt("mysql.numconnections");
+    final int port = props.getInt("mysql.port");
+    final String host = props.getString("mysql.host");
+    final String database = props.getString("mysql.database");
+    final String user = props.getString("mysql.user");
+    final String password = props.getString("mysql.password");
+    final int numConnections = props.getInt("mysql.numconnections");
 
     return MySQLDataSource.getInstance(host, port, database, user, password, numConnections);
   }
@@ -129,7 +130,7 @@ public class AzkabanCommonModule extends AbstractModule {
   @Provides
   @Singleton
   public Configuration createHadoopConfiguration() {
-    final String hadoopConfDirPath = requireNonNull(props.get(HADOOP_CONF_DIR_PATH));
+    final String hadoopConfDirPath = requireNonNull(this.props.get(HADOOP_CONF_DIR_PATH));
 
     final File hadoopConfDir = new File(requireNonNull(hadoopConfDirPath));
     checkArgument(hadoopConfDir.exists() && hadoopConfDir.isDirectory());
@@ -147,14 +148,14 @@ public class AzkabanCommonModule extends AbstractModule {
   public FileSystem createHadoopFileSystem(final Configuration hadoopConf) {
     try {
       return FileSystem.get(hadoopConf);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       log.error("Unable to initialize HDFS", e);
       throw new AzkabanException(e);
     }
   }
 
   @Provides
-  public QueryRunner createQueryRunner(AzkabanDataSource dataSource) {
+  public QueryRunner createQueryRunner(final AzkabanDataSource dataSource) {
     return new QueryRunner(dataSource);
   }
 }
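
For context, a module like this is typically handed to Guice to build an injector. The sketch below is a hypothetical bootstrap, not Azkaban's actual startup path: the property values are placeholders, and only the h2 branch of getDataSource(Props) is exercised so that no MySQL or Hadoop settings are needed.

import azkaban.AzkabanCommonModule;
import azkaban.db.AzkabanDataSource;
import azkaban.utils.Props;
import com.google.inject.Guice;
import com.google.inject.Injector;

// Hypothetical bootstrap sketch: Props-based configuration goes in,
// Guice-managed singletons come out.
public final class CommonModuleBootstrapSketch {

  private CommonModuleBootstrapSketch() {
  }

  public static void main(final String[] args) {
    final Props props = new Props();
    props.put("database.type", "h2");             // selects the H2FileDataSource branch
    props.put("h2.path", "build/tmp/azkaban-h2"); // placeholder path

    final Injector injector = Guice.createInjector(new AzkabanCommonModule(props));

    // Anything bound in configure() or exposed via @Provides can now be injected,
    // e.g. the AzkabanDataSource built by getDataSource(Props).
    final AzkabanDataSource dataSource = injector.getInstance(AzkabanDataSource.class);
    System.out.println("Data source: " + dataSource.getClass().getSimpleName());
  }
}
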
diff --git a/azkaban-common/src/main/java/azkaban/AzkabanCommonModuleConfig.java b/azkaban-common/src/main/java/azkaban/AzkabanCommonModuleConfig.java
index 957efc6..6de763d 100644
--- a/azkaban-common/src/main/java/azkaban/AzkabanCommonModuleConfig.java
+++ b/azkaban-common/src/main/java/azkaban/AzkabanCommonModuleConfig.java
@@ -17,60 +17,58 @@
 
 package azkaban;
 
-import azkaban.db.AzkabanDataSource;
-import azkaban.db.H2FileDataSource;
-import azkaban.db.MySQLDataSource;
+import static azkaban.Constants.ConfigurationKeys.AZKABAN_STORAGE_HDFS_ROOT_URI;
+import static azkaban.Constants.ConfigurationKeys.AZKABAN_STORAGE_LOCAL_BASEDIR;
+import static azkaban.Constants.ConfigurationKeys.AZKABAN_STORAGE_TYPE;
+import static azkaban.storage.StorageImplementationType.DATABASE;
+
 import azkaban.storage.StorageImplementationType;
 import azkaban.utils.Props;
 import com.google.inject.Inject;
 import java.net.URI;
-import org.apache.commons.dbutils.QueryRunner;
 import org.apache.log4j.Logger;
 
-import static azkaban.Constants.ConfigurationKeys.*;
-import static azkaban.storage.StorageImplementationType.*;
-
 
 public class AzkabanCommonModuleConfig {
+
   private static final Logger log = Logger.getLogger(AzkabanCommonModuleConfig.class);
 
   private final Props props;
-
+  private final URI hdfsRootUri;
   /**
-   * Storage Implementation
-   * This can be any of the {@link StorageImplementationType} values in which case {@link StorageFactory} will create
-   * the appropriate storage instance. Or one can feed in a custom implementation class using the full qualified
-   * path required by a classloader.
+   * Storage Implementation This can be any of the {@link StorageImplementationType} values in which
+   * case {@link StorageFactory} will create the appropriate storage instance. Or one can feed in a
+   * custom implementation class using the full qualified path required by a classloader.
    *
    * examples: LOCAL, DATABASE, azkaban.storage.MyFavStorage
-   *
    */
   private String storageImplementation = DATABASE.name();
   private String localStorageBaseDirPath = "LOCAL_STORAGE";
-  private URI hdfsRootUri;
 
   @Inject
-  public AzkabanCommonModuleConfig(Props props) {
+  public AzkabanCommonModuleConfig(final Props props) {
     this.props = props;
 
-    storageImplementation = props.getString(AZKABAN_STORAGE_TYPE, storageImplementation);
-    localStorageBaseDirPath = props.getString(AZKABAN_STORAGE_LOCAL_BASEDIR, localStorageBaseDirPath);
-    hdfsRootUri = props.get(AZKABAN_STORAGE_HDFS_ROOT_URI) != null ? props.getUri(AZKABAN_STORAGE_HDFS_ROOT_URI) : null;
+    this.storageImplementation = props.getString(AZKABAN_STORAGE_TYPE, this.storageImplementation);
+    this.localStorageBaseDirPath = props
+        .getString(AZKABAN_STORAGE_LOCAL_BASEDIR, this.localStorageBaseDirPath);
+    this.hdfsRootUri = props.get(AZKABAN_STORAGE_HDFS_ROOT_URI) != null ? props
+        .getUri(AZKABAN_STORAGE_HDFS_ROOT_URI) : null;
   }
 
   public Props getProps() {
-    return props;
+    return this.props;
   }
 
   public String getStorageImplementation() {
-    return storageImplementation;
+    return this.storageImplementation;
   }
 
   public String getLocalStorageBaseDirPath() {
-    return localStorageBaseDirPath;
+    return this.localStorageBaseDirPath;
   }
 
   public URI getHdfsRootUri() {
-    return hdfsRootUri;
+    return this.hdfsRootUri;
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/Constants.java b/azkaban-common/src/main/java/azkaban/Constants.java
index f3f1ecb..06c982c 100644
--- a/azkaban-common/src/main/java/azkaban/Constants.java
+++ b/azkaban-common/src/main/java/azkaban/Constants.java
@@ -22,13 +22,13 @@ package azkaban;
  *
  * Global place for storing constants.
  * Conventions:
- *  - All internal constants to be put in the root level ie. {@link Constants} class
- *  - All Configuration keys to be put in {@link ConfigurationKeys} class
- *  - Flow level Properties keys go to {@link FlowProperties}
- *  - Job  level Properties keys go to {@link JobProperties}
- *
+ * - All internal constants to be put in the root level ie. {@link Constants} class
+ * - All Configuration keys to be put in {@link ConfigurationKeys} class
+ * - Flow level Properties keys go to {@link FlowProperties}
+ * - Job  level Properties keys go to {@link JobProperties}
  */
 public class Constants {
+
   // Names and paths of various file names to configure Azkaban
   public static final String AZKABAN_PROPERTIES_FILE = "azkaban.properties";
   public static final String AZKABAN_PRIVATE_PROPERTIES_FILE = "azkaban.private.properties";
@@ -99,6 +99,7 @@ public class Constants {
   }
 
   public static class FlowProperties {
+
     // Basic properties of flows as set by the executor server
     public static final String AZKABAN_FLOW_PROJECT_NAME = "azkaban.flow.projectname";
     public static final String AZKABAN_FLOW_FLOW_ID = "azkaban.flow.flowid";
@@ -108,11 +109,13 @@ public class Constants {
   }
 
   public static class JobProperties {
+
     // Job property that enables/disables using Kafka logging of user job logs
     public static final String AZKABAN_JOB_LOGGING_KAFKA_ENABLE = "azkaban.job.logging.kafka.enable";
   }
 
   public static class JobCallbackProperties {
+
     public static final String JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT = "jobcallback.connection.request.timeout";
     public static final String JOBCALLBACK_CONNECTION_TIMEOUT = "jobcallback.connection.timeout";
     public static final String JOBCALLBACK_SOCKET_TIMEOUT = "jobcallback.socket.timeout";
diff --git a/azkaban-common/src/main/java/azkaban/database/DataSourceUtils.java b/azkaban-common/src/main/java/azkaban/database/DataSourceUtils.java
index 35f96f3..8d38222 100644
--- a/azkaban-common/src/main/java/azkaban/database/DataSourceUtils.java
+++ b/azkaban-common/src/main/java/azkaban/database/DataSourceUtils.java
@@ -33,9 +33,6 @@ public class DataSourceUtils {
 
   /**
    * Create Datasource from parameters in the properties
-   *
-   * @param props
-   * @return
    */
   public static AzkabanDataSource getDataSource(final Props props) {
     final String databaseType = props.getString("database.type");
@@ -64,14 +61,6 @@ public class DataSourceUtils {
 
   /**
    * Create a MySQL DataSource
-   *
-   * @param host
-   * @param port
-   * @param dbName
-   * @param user
-   * @param password
-   * @param numConnections
-   * @return
    */
   public static AzkabanDataSource getMySQLDataSource(final String host, final Integer port,
       final String dbName, final String user, final String password, final Integer numConnections) {
@@ -81,9 +70,6 @@ public class DataSourceUtils {
 
   /**
    * Create H2 DataSource
-   *
-   * @param file
-   * @return
    */
   public static AzkabanDataSource getH2DataSource(final Path file) {
     return new EmbeddedH2BasicDataSource(file);
@@ -117,7 +103,6 @@ public class DataSourceUtils {
 
   /**
    * MySQL data source based on AzkabanDataSource
-   *
    */
   public static class MySQLBasicDataSource extends AzkabanDataSource {
 
@@ -152,7 +137,6 @@ public class DataSourceUtils {
 
   /**
    * H2 Datasource
-   *
    */
   public static class EmbeddedH2BasicDataSource extends AzkabanDataSource {
 
diff --git a/azkaban-common/src/main/java/azkaban/event/Event.java b/azkaban-common/src/main/java/azkaban/event/Event.java
index 7eb59f9..fc79b4a 100644
--- a/azkaban-common/src/main/java/azkaban/event/Event.java
+++ b/azkaban-common/src/main/java/azkaban/event/Event.java
@@ -19,56 +19,58 @@ package azkaban.event;
 import com.google.common.base.Preconditions;
 
 public class Event {
-  public enum Type {
-    FLOW_STARTED,
-    FLOW_FINISHED,
-    JOB_STARTED,
-    JOB_FINISHED,
-    JOB_STATUS_CHANGED,
-    EXTERNAL_FLOW_UPDATED,
-    EXTERNAL_JOB_UPDATED
-  }
 
   private final Object runner;
   private final Type type;
   private final EventData eventData;
   private final long time;
 
-  private Event(Object runner, Type type, EventData eventData) {
+  private Event(final Object runner, final Type type, final EventData eventData) {
     this.runner = runner;
     this.type = type;
     this.eventData = eventData;
     this.time = System.currentTimeMillis();
   }
 
+  /**
+   * Creates a new event.
+   *
+   * @param runner runner.
+   * @param type type.
+   * @param eventData EventData, null is not allowed.
+   * @return New Event instance.
+   * @throws NullPointerException if EventData is null.
+   */
+  public static Event create(final Object runner, final Type type, final EventData eventData)
+      throws NullPointerException {
+    Preconditions.checkNotNull(eventData, "EventData was null");
+    return new Event(runner, type, eventData);
+  }
+
   public Object getRunner() {
-    return runner;
+    return this.runner;
   }
 
   public Type getType() {
-    return type;
+    return this.type;
   }
 
   public long getTime() {
-    return time;
+    return this.time;
   }
 
   public EventData getData() {
-    return eventData;
+    return this.eventData;
   }
 
-  /**
-   * Creates a new event.
-   *
-   * @param runner runner.
-   * @param type type.
-   * @param eventData EventData, null is not allowed.
-   * @return New Event instance.
-   * @throws NullPointerException if EventData is null.
-   */
-  public static Event create(Object runner, Type type, EventData eventData) throws NullPointerException {
-    Preconditions.checkNotNull(eventData, "EventData was null");
-    return new Event(runner, type, eventData);
+  public enum Type {
+    FLOW_STARTED,
+    FLOW_FINISHED,
+    JOB_STARTED,
+    JOB_FINISHED,
+    JOB_STATUS_CHANGED,
+    EXTERNAL_FLOW_UPDATED,
+    EXTERNAL_JOB_UPDATED
   }
 
 }
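
A brief usage note on the factory method moved above: it rejects a null EventData, so callers construct the data object first. The sketch below is illustrative; the runner placeholder and the nested id string are made up, while the types and signatures are the ones shown in these diffs (Status is azkaban.executor.Status).

import azkaban.event.Event;
import azkaban.event.Event.Type;
import azkaban.event.EventData;
import azkaban.executor.Status;

// Hypothetical usage of Event.create: a null EventData triggers a
// NullPointerException via Preconditions.checkNotNull.
public final class EventCreateSketch {

  private EventCreateSketch() {
  }

  public static void main(final String[] args) {
    final Object runner = new Object(); // placeholder for a flow/job runner
    final EventData data = new EventData(Status.SUCCEEDED, "myflow:myjob");

    final Event event = Event.create(runner, Type.JOB_FINISHED, data);
    System.out.println(event.getType() + " at " + event.getTime()
        + " for " + event.getData().getNestedId());
  }
}
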
diff --git a/azkaban-common/src/main/java/azkaban/event/EventData.java b/azkaban-common/src/main/java/azkaban/event/EventData.java
index ae83bdd..e0ee98d 100644
--- a/azkaban-common/src/main/java/azkaban/event/EventData.java
+++ b/azkaban-common/src/main/java/azkaban/event/EventData.java
@@ -17,25 +17,26 @@ public class EventData {
    * @param status node status.
    * @param nestedId node id, corresponds to {@link ExecutableNode#getNestedId()}.
    */
-  public EventData(Status status, String nestedId) {
+  public EventData(final Status status, final String nestedId) {
     this.status = status;
     this.nestedId = nestedId;
   }
 
   /**
    * Creates a new EventData instance.
+   *
    * @param node node.
    */
-  public EventData(ExecutableNode node) {
+  public EventData(final ExecutableNode node) {
     this(node.getStatus(), node.getNestedId());
   }
 
   public Status getStatus() {
-    return status;
+    return this.status;
   }
 
   public String getNestedId() {
-    return nestedId;
+    return this.nestedId;
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/event/EventHandler.java b/azkaban-common/src/main/java/azkaban/event/EventHandler.java
index f568c5a..3a7460c 100644
--- a/azkaban-common/src/main/java/azkaban/event/EventHandler.java
+++ b/azkaban-common/src/main/java/azkaban/event/EventHandler.java
@@ -20,24 +20,25 @@ import java.util.ArrayList;
 import java.util.HashSet;
 
 public class EventHandler {
-  private HashSet<EventListener> listeners = new HashSet<EventListener>();
+
+  private final HashSet<EventListener> listeners = new HashSet<>();
 
   public EventHandler() {
   }
 
-  public void addListener(EventListener listener) {
-    listeners.add(listener);
+  public void addListener(final EventListener listener) {
+    this.listeners.add(listener);
   }
 
-  public void fireEventListeners(Event event) {
-    ArrayList<EventListener> listeners =
-        new ArrayList<EventListener>(this.listeners);
-    for (EventListener listener : listeners) {
+  public void fireEventListeners(final Event event) {
+    final ArrayList<EventListener> listeners =
+        new ArrayList<>(this.listeners);
+    for (final EventListener listener : listeners) {
       listener.handleEvent(event);
     }
   }
 
-  public void removeListener(EventListener listener) {
-    listeners.remove(listener);
+  public void removeListener(final EventListener listener) {
+    this.listeners.remove(listener);
   }
 }
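
As a small illustration of the handler/listener pattern reformatted above: fireEventListeners copies the listener set into a fresh ArrayList before iterating, so a listener may deregister itself while handling an event. The sketch below is hypothetical and reuses the same assumptions as the Event sketch above.

import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.event.EventHandler;
import azkaban.event.EventListener;
import azkaban.executor.Status;

// Hypothetical sketch: register a listener, fire one event, deregister.
public final class EventHandlerSketch {

  private EventHandlerSketch() {
  }

  public static void main(final String[] args) {
    final EventHandler handler = new EventHandler();

    final EventListener printer = new EventListener() {
      @Override
      public void handleEvent(final Event event) {
        System.out.println("Got event: " + event.getType());
      }
    };

    handler.addListener(printer);
    handler.fireEventListeners(
        Event.create(handler, Event.Type.FLOW_STARTED, new EventData(Status.RUNNING, "myflow")));
    handler.removeListener(printer);
  }
}
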
diff --git a/azkaban-common/src/main/java/azkaban/event/EventListener.java b/azkaban-common/src/main/java/azkaban/event/EventListener.java
index a1d34a7..596833f 100644
--- a/azkaban-common/src/main/java/azkaban/event/EventListener.java
+++ b/azkaban-common/src/main/java/azkaban/event/EventListener.java
@@ -17,5 +17,6 @@
 package azkaban.event;
 
 public interface EventListener {
+
   public void handleEvent(Event event);
 }
diff --git a/azkaban-common/src/main/java/azkaban/executor/AlerterHolder.java b/azkaban-common/src/main/java/azkaban/executor/AlerterHolder.java
index 10c502a..9c3d799 100644
--- a/azkaban-common/src/main/java/azkaban/executor/AlerterHolder.java
+++ b/azkaban-common/src/main/java/azkaban/executor/AlerterHolder.java
@@ -37,55 +37,54 @@ import org.apache.log4j.Logger;
 
 
 public class AlerterHolder {
-  private Map<String, Alerter> alerters;
 
-  private static Logger logger = Logger.getLogger(AlerterHolder.class);
+  private static final Logger logger = Logger.getLogger(AlerterHolder.class);
+  private Map<String, Alerter> alerters;
 
   @Inject
-  public AlerterHolder(Props props) {
+  public AlerterHolder(final Props props) {
     try {
-      alerters = loadAlerters(props);
-    }
-    catch (Exception ex) {
+      this.alerters = loadAlerters(props);
+    } catch (final Exception ex) {
       logger.error(ex);
-      alerters = new HashMap<>();
+      this.alerters = new HashMap<>();
     }
   }
 
-  private Map<String, Alerter> loadAlerters(Props props) {
-    Map<String, Alerter> allAlerters = new HashMap<String, Alerter>();
+  private Map<String, Alerter> loadAlerters(final Props props) {
+    final Map<String, Alerter> allAlerters = new HashMap<>();
     // load built-in alerters
-    Emailer mailAlerter = new Emailer(props);
+    final Emailer mailAlerter = new Emailer(props);
     allAlerters.put("email", mailAlerter);
     // load all plugin alerters
-    String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter");
+    final String pluginDir = props.getString("alerter.plugin.dir", "plugins/alerter");
     allAlerters.putAll(loadPluginAlerters(pluginDir));
     return allAlerters;
   }
 
-  private Map<String, Alerter> loadPluginAlerters(String pluginPath) {
-    File alerterPluginPath = new File(pluginPath);
+  private Map<String, Alerter> loadPluginAlerters(final String pluginPath) {
+    final File alerterPluginPath = new File(pluginPath);
     if (!alerterPluginPath.exists()) {
-      return Collections.<String, Alerter> emptyMap();
+      return Collections.<String, Alerter>emptyMap();
     }
 
-    Map<String, Alerter> installedAlerterPlugins =
-        new HashMap<String, Alerter>();
-    ClassLoader parentLoader = getClass().getClassLoader();
-    File[] pluginDirs = alerterPluginPath.listFiles();
-    ArrayList<String> jarPaths = new ArrayList<String>();
-    for (File pluginDir : pluginDirs) {
+    final Map<String, Alerter> installedAlerterPlugins =
+        new HashMap<>();
+    final ClassLoader parentLoader = getClass().getClassLoader();
+    final File[] pluginDirs = alerterPluginPath.listFiles();
+    final ArrayList<String> jarPaths = new ArrayList<>();
+    for (final File pluginDir : pluginDirs) {
       if (!pluginDir.isDirectory()) {
         logger.error("The plugin path " + pluginDir + " is not a directory.");
         continue;
       }
 
       // Load the conf directory
-      File propertiesDir = new File(pluginDir, "conf");
+      final File propertiesDir = new File(pluginDir, "conf");
       Props pluginProps = null;
       if (propertiesDir.exists() && propertiesDir.isDirectory()) {
-        File propertiesFile = new File(propertiesDir, "plugin.properties");
-        File propertiesOverrideFile =
+        final File propertiesFile = new File(propertiesDir, "plugin.properties");
+        final File propertiesOverrideFile =
             new File(propertiesDir, "override.properties");
 
         if (propertiesFile.exists()) {
@@ -105,12 +104,12 @@ public class AlerterHolder {
         continue;
       }
 
-      String pluginName = pluginProps.getString("alerter.name");
-      List<String> extLibClasspath =
+      final String pluginName = pluginProps.getString("alerter.name");
+      final List<String> extLibClasspath =
           pluginProps.getStringList("alerter.external.classpaths",
               (List<String>) null);
 
-      String pluginClass = pluginProps.getString("alerter.class");
+      final String pluginClass = pluginProps.getString("alerter.class");
       if (pluginClass == null) {
         logger.error("Alerter class is not set.");
       } else {
@@ -118,26 +117,26 @@ public class AlerterHolder {
       }
 
       URLClassLoader urlClassLoader = null;
-      File libDir = new File(pluginDir, "lib");
+      final File libDir = new File(pluginDir, "lib");
       if (libDir.exists() && libDir.isDirectory()) {
-        File[] files = libDir.listFiles();
+        final File[] files = libDir.listFiles();
 
-        ArrayList<URL> urls = new ArrayList<URL>();
+        final ArrayList<URL> urls = new ArrayList<>();
         for (int i = 0; i < files.length; ++i) {
           try {
-            URL url = files[i].toURI().toURL();
+            final URL url = files[i].toURI().toURL();
             urls.add(url);
-          } catch (MalformedURLException e) {
+          } catch (final MalformedURLException e) {
             logger.error(e);
           }
         }
         if (extLibClasspath != null) {
-          for (String extLib : extLibClasspath) {
+          for (final String extLib : extLibClasspath) {
             try {
-              File file = new File(pluginDir, extLib);
-              URL url = file.toURI().toURL();
+              final File file = new File(pluginDir, extLib);
+              final URL url = file.toURI().toURL();
               urls.add(url);
-            } catch (MalformedURLException e) {
+            } catch (final MalformedURLException e) {
               logger.error(e);
             }
           }
@@ -153,19 +152,19 @@ public class AlerterHolder {
       Class<?> alerterClass = null;
       try {
         alerterClass = urlClassLoader.loadClass(pluginClass);
-      } catch (ClassNotFoundException e) {
+      } catch (final ClassNotFoundException e) {
         logger.error("Class " + pluginClass + " not found.");
         continue;
       }
 
-      String source = FileIOUtils.getSourcePathFromClass(alerterClass);
+      final String source = FileIOUtils.getSourcePathFromClass(alerterClass);
       logger.info("Source jar " + source);
       jarPaths.add("jar:file:" + source);
 
       Constructor<?> constructor = null;
       try {
         constructor = alerterClass.getConstructor(Props.class);
-      } catch (NoSuchMethodException e) {
+      } catch (final NoSuchMethodException e) {
         logger.error("Constructor not found in " + pluginClass);
         continue;
       }
@@ -173,7 +172,7 @@ public class AlerterHolder {
       Object obj = null;
       try {
         obj = constructor.newInstance(pluginProps);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error(e);
       }
 
@@ -182,14 +181,14 @@ public class AlerterHolder {
         continue;
       }
 
-      Alerter plugin = (Alerter) obj;
+      final Alerter plugin = (Alerter) obj;
       installedAlerterPlugins.put(pluginName, plugin);
     }
 
     return installedAlerterPlugins;
   }
 
-  public Alerter get(String alerterType) {
+  public Alerter get(final String alerterType) {
     return this.alerters.get(alerterType);
   }
 }
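
For reference, the plugin discovery above implies a directory layout roughly like the one sketched below. The plugin name, class, and jar names are placeholders; the conf/ and lib/ locations and the alerter.name, alerter.class, and alerter.external.classpaths keys are the ones read by loadPluginAlerters, and plugins/alerter is the default value of alerter.plugin.dir.

plugins/alerter/my-alerter/
  conf/plugin.properties     # required; override.properties is layered on top if present
  lib/*.jar                  # added to the plugin's URLClassLoader

# conf/plugin.properties (illustrative values)
alerter.name=my-alerter
alerter.class=com.example.MyAlerter
# optional; entries are resolved relative to the plugin directory
alerter.external.classpaths=extlib/extra.jar
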
diff --git a/azkaban-common/src/main/java/azkaban/executor/ConnectorParams.java b/azkaban-common/src/main/java/azkaban/executor/ConnectorParams.java
index d13a0f1..8b32604 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ConnectorParams.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ConnectorParams.java
@@ -17,6 +17,7 @@
 package azkaban.executor;
 
 public interface ConnectorParams {
+
   public static final String EXECUTOR_ID_PARAM = "executorId";
   public static final String ACTION_PARAM = "action";
   public static final String EXECID_PARAM = "execid";
@@ -101,8 +102,8 @@ public interface ConnectorParams {
   public static final String STATS_MAP_METRICNAMEPARAM = "metricName";
 
   /**
-   * useStats param is used to filter datapoints on /stats graph by using standard deviation and means
-   * By default, we consider only top/bottom 5% datapoints
+   * useStats param is used to filter datapoints on /stats graph by using standard deviation and
+   * means By default, we consider only top/bottom 5% datapoints
    */
 
   public static final String STATS_MAP_METRICRETRIEVALMODE = "useStats";
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlow.java b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlow.java
index ea72c80..4b6598d 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlow.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlow.java
@@ -15,7 +15,10 @@
  */
 package azkaban.executor;
 
+import azkaban.flow.Flow;
+import azkaban.project.Project;
 import azkaban.sla.SlaOption;
+import azkaban.utils.TypedMapWrapper;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -25,12 +28,9 @@ import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import azkaban.flow.Flow;
-import azkaban.project.Project;
-import azkaban.utils.TypedMapWrapper;
-
 
 public class ExecutableFlow extends ExecutableFlowBase {
+
   public static final String EXECUTIONID_PARAM = "executionId";
   public static final String EXECUTIONPATH_PARAM = "executionPath";
   public static final String EXECUTIONOPTIONS_PARAM = "executionOptions";
@@ -44,8 +44,7 @@ public class ExecutableFlow extends ExecutableFlowBase {
   public static final String LASTMODIFIEDTIME_PARAM = "lastModfiedTime";
   public static final String LASTMODIFIEDUSER_PARAM = "lastModifiedUser";
   public static final String SLAOPTIONS_PARAM = "slaOptions";
-
-
+  private final HashSet<String> proxyUsers = new HashSet<>();
   private int executionId = -1;
   private int scheduleId = -1;
   private int projectId;
@@ -56,12 +55,10 @@ public class ExecutableFlow extends ExecutableFlowBase {
   private long lastModifiedTimestamp;
   private String submitUser;
   private String executionPath;
-
-  private HashSet<String> proxyUsers = new HashSet<String>();
   private ExecutionOptions executionOptions;
   private List<SlaOption> slaOptions = new ArrayList<>();
 
-  public ExecutableFlow(Project project, Flow flow) {
+  public ExecutableFlow(final Project project, final Flow flow) {
     this.projectId = project.getId();
     this.projectName = project.getName();
     this.version = project.getVersion();
@@ -74,6 +71,14 @@ public class ExecutableFlow extends ExecutableFlowBase {
   public ExecutableFlow() {
   }
 
+  public static ExecutableFlow createExecutableFlowFromObject(final Object obj) {
+    final ExecutableFlow exFlow = new ExecutableFlow();
+    final HashMap<String, Object> flowObj = (HashMap<String, Object>) obj;
+    exFlow.fillExecutableFromMapObject(flowObj);
+
+    return exFlow;
+  }
+
   @Override
   public String getId() {
     return getFlowId();
@@ -84,155 +89,154 @@ public class ExecutableFlow extends ExecutableFlowBase {
     return this;
   }
 
-  public void addAllProxyUsers(Collection<String> proxyUsers) {
+  public void addAllProxyUsers(final Collection<String> proxyUsers) {
     this.proxyUsers.addAll(proxyUsers);
   }
 
   public Set<String> getProxyUsers() {
-    return new HashSet<String>(this.proxyUsers);
+    return new HashSet<>(this.proxyUsers);
   }
 
-  public void setExecutionOptions(ExecutionOptions options) {
-    executionOptions = options;
+  public ExecutionOptions getExecutionOptions() {
+    return this.executionOptions;
   }
 
-  public ExecutionOptions getExecutionOptions() {
-    return executionOptions;
+  public void setExecutionOptions(final ExecutionOptions options) {
+    this.executionOptions = options;
   }
 
   public List<SlaOption> getSlaOptions() {
-    return slaOptions;
+    return this.slaOptions;
   }
 
+  public void setSlaOptions(final List<SlaOption> slaOptions) {
+    this.slaOptions = slaOptions;
+  }
 
   @Override
-  protected void setFlow(Project project, Flow flow) {
+  protected void setFlow(final Project project, final Flow flow) {
     super.setFlow(project, flow);
-    executionOptions = new ExecutionOptions();
-    executionOptions.setMailCreator(flow.getMailCreator());
+    this.executionOptions = new ExecutionOptions();
+    this.executionOptions.setMailCreator(flow.getMailCreator());
 
     if (flow.getSuccessEmails() != null) {
-      executionOptions.setSuccessEmails(flow.getSuccessEmails());
+      this.executionOptions.setSuccessEmails(flow.getSuccessEmails());
     }
     if (flow.getFailureEmails() != null) {
-      executionOptions.setFailureEmails(flow.getFailureEmails());
+      this.executionOptions.setFailureEmails(flow.getFailureEmails());
     }
   }
 
   @Override
   public int getExecutionId() {
-    return executionId;
+    return this.executionId;
   }
 
-  public void setExecutionId(int executionId) {
+  public void setExecutionId(final int executionId) {
     this.executionId = executionId;
   }
 
   @Override
   public long getLastModifiedTimestamp() {
-    return lastModifiedTimestamp;
+    return this.lastModifiedTimestamp;
   }
 
-  public void setLastModifiedTimestamp(long lastModifiedTimestamp) {
+  public void setLastModifiedTimestamp(final long lastModifiedTimestamp) {
     this.lastModifiedTimestamp = lastModifiedTimestamp;
   }
 
   @Override
   public String getLastModifiedByUser() {
-    return lastModifiedUser;
+    return this.lastModifiedUser;
   }
 
-  public void setLastModifiedByUser(String lastModifiedUser) {
+  public void setLastModifiedByUser(final String lastModifiedUser) {
     this.lastModifiedUser = lastModifiedUser;
   }
 
   @Override
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
-  public void setProjectId(int projectId) {
+  public void setProjectId(final int projectId) {
     this.projectId = projectId;
   }
 
   @Override
   public String getProjectName() {
-    return projectName;
+    return this.projectName;
   }
 
   public int getScheduleId() {
-    return scheduleId;
+    return this.scheduleId;
   }
 
-  public void setScheduleId(int scheduleId) {
+  public void setScheduleId(final int scheduleId) {
     this.scheduleId = scheduleId;
   }
 
   public String getExecutionPath() {
-    return executionPath;
+    return this.executionPath;
   }
 
-  public void setExecutionPath(String executionPath) {
+  public void setExecutionPath(final String executionPath) {
     this.executionPath = executionPath;
   }
 
   public String getSubmitUser() {
-    return submitUser;
+    return this.submitUser;
   }
 
-  public void setSubmitUser(String submitUser) {
+  public void setSubmitUser(final String submitUser) {
     this.submitUser = submitUser;
   }
 
   @Override
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
-  public void setVersion(int version) {
+  public void setVersion(final int version) {
     this.version = version;
   }
 
   public long getSubmitTime() {
-    return submitTime;
+    return this.submitTime;
   }
 
-  public void setSubmitTime(long submitTime) {
+  public void setSubmitTime(final long submitTime) {
     this.submitTime = submitTime;
   }
 
-  public void setSlaOptions(List<SlaOption> slaOptions) {
-    this.slaOptions = slaOptions;
-  }
-
   @Override
   public Map<String, Object> toObject() {
-    HashMap<String, Object> flowObj = new HashMap<String, Object>();
+    final HashMap<String, Object> flowObj = new HashMap<>();
     fillMapFromExecutable(flowObj);
 
-    flowObj.put(EXECUTIONID_PARAM, executionId);
-    flowObj.put(EXECUTIONPATH_PARAM, executionPath);
-    flowObj.put(PROJECTID_PARAM, projectId);
-    flowObj.put(PROJECTNAME_PARAM, projectName);
+    flowObj.put(EXECUTIONID_PARAM, this.executionId);
+    flowObj.put(EXECUTIONPATH_PARAM, this.executionPath);
+    flowObj.put(PROJECTID_PARAM, this.projectId);
+    flowObj.put(PROJECTNAME_PARAM, this.projectName);
 
-    if (scheduleId >= 0) {
-      flowObj.put(SCHEDULEID_PARAM, scheduleId);
+    if (this.scheduleId >= 0) {
+      flowObj.put(SCHEDULEID_PARAM, this.scheduleId);
     }
 
-    flowObj.put(SUBMITUSER_PARAM, submitUser);
-    flowObj.put(VERSION_PARAM, version);
-    flowObj.put(LASTMODIFIEDTIME_PARAM, lastModifiedTimestamp);
-    flowObj.put(LASTMODIFIEDUSER_PARAM, lastModifiedUser);
+    flowObj.put(SUBMITUSER_PARAM, this.submitUser);
+    flowObj.put(VERSION_PARAM, this.version);
+    flowObj.put(LASTMODIFIEDTIME_PARAM, this.lastModifiedTimestamp);
+    flowObj.put(LASTMODIFIEDUSER_PARAM, this.lastModifiedUser);
 
     flowObj.put(EXECUTIONOPTIONS_PARAM, this.executionOptions.toObject());
-    flowObj.put(VERSION_PARAM, version);
+    flowObj.put(VERSION_PARAM, this.version);
 
-    ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
+    final ArrayList<String> proxyUserList = new ArrayList<>(this.proxyUsers);
     flowObj.put(PROXYUSERS_PARAM, proxyUserList);
 
-    flowObj.put(SUBMITTIME_PARAM, submitTime);
+    flowObj.put(SUBMITTIME_PARAM, this.submitTime);
 
-    List<Map<String, Object>> slaOptions = new ArrayList<>();
+    final List<Map<String, Object>> slaOptions = new ArrayList<>();
     this.getSlaOptions().stream().forEach((slaOption) -> slaOptions.add(slaOption.toObject()));
 
     flowObj.put(SLAOPTIONS_PARAM, slaOptions);
@@ -240,18 +244,9 @@ public class ExecutableFlow extends ExecutableFlowBase {
     return flowObj;
   }
 
-  @SuppressWarnings("unchecked")
-    public static ExecutableFlow createExecutableFlowFromObject(Object obj) {
-    ExecutableFlow exFlow = new ExecutableFlow();
-    HashMap<String, Object> flowObj = (HashMap<String, Object>) obj;
-    exFlow.fillExecutableFromMapObject(flowObj);
-
-    return exFlow;
-  }
-
   @Override
   public void fillExecutableFromMapObject(
-      TypedMapWrapper<String, Object> flowObj) {
+      final TypedMapWrapper<String, Object> flowObj) {
     super.fillExecutableFromMapObject(flowObj);
 
     this.executionId = flowObj.getInt(EXECUTIONID_PARAM);
@@ -276,20 +271,21 @@ public class ExecutableFlow extends ExecutableFlowBase {
     }
 
     if (flowObj.containsKey(PROXYUSERS_PARAM)) {
-      List<String> proxyUserList = flowObj.<String> getList(PROXYUSERS_PARAM);
+      final List<String> proxyUserList = flowObj.<String>getList(PROXYUSERS_PARAM);
       this.addAllProxyUsers(proxyUserList);
     }
 
     if (flowObj.containsKey(SLAOPTIONS_PARAM)) {
-      List<SlaOption> slaOptions =
-          flowObj.getList(SLAOPTIONS_PARAM).stream().map(SlaOption::fromObject).collect(Collectors.toList());
+      final List<SlaOption> slaOptions =
+          flowObj.getList(SLAOPTIONS_PARAM).stream().map(SlaOption::fromObject)
+              .collect(Collectors.toList());
       this.setSlaOptions(slaOptions);
     }
   }
 
   @Override
-  public Map<String, Object> toUpdateObject(long lastUpdateTime) {
-    Map<String, Object> updateData = super.toUpdateObject(lastUpdateTime);
+  public Map<String, Object> toUpdateObject(final long lastUpdateTime) {
+    final Map<String, Object> updateData = super.toUpdateObject(lastUpdateTime);
     updateData.put(EXECUTIONID_PARAM, this.executionId);
     return updateData;
   }
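
To illustrate the relocated factory method and toObject() together, here is a hypothetical round-trip sketch. It assumes the flow passed in was built normally (e.g. from a Project and a Flow), so that executionOptions and the other fields serialized by toObject() are populated.

import azkaban.executor.ExecutableFlow;
import java.util.Map;

// Hypothetical sketch: serialize an ExecutableFlow to the Map form used for
// storage/transport, then rebuild it with createExecutableFlowFromObject.
public final class ExecutableFlowRoundTripSketch {

  private ExecutableFlowRoundTripSketch() {
  }

  public static ExecutableFlow roundTrip(final ExecutableFlow original) {
    final Map<String, Object> asMap = original.toObject();
    return ExecutableFlow.createExecutableFlowFromObject(asMap);
  }
}
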
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowBase.java b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowBase.java
index 41389b6..a714dec 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowBase.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowBase.java
@@ -15,12 +15,6 @@
  */
 package azkaban.executor;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
 import azkaban.flow.Edge;
 import azkaban.flow.Flow;
 import azkaban.flow.FlowProps;
@@ -28,25 +22,30 @@ import azkaban.flow.Node;
 import azkaban.flow.SpecialJobTypes;
 import azkaban.project.Project;
 import azkaban.utils.TypedMapWrapper;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 public class ExecutableFlowBase extends ExecutableNode {
+
   public static final String FLOW_ID_PARAM = "flowId";
   public static final String NODES_PARAM = "nodes";
   public static final String PROPERTIES_PARAM = "properties";
   public static final String SOURCE_PARAM = "source";
   public static final String INHERITED_PARAM = "inherited";
 
-  private HashMap<String, ExecutableNode> executableNodes =
-      new HashMap<String, ExecutableNode>();
+  private final HashMap<String, ExecutableNode> executableNodes =
+      new HashMap<>();
+  private final HashMap<String, FlowProps> flowProps =
+      new HashMap<>();
   private ArrayList<String> startNodes;
   private ArrayList<String> endNodes;
-
-  private HashMap<String, FlowProps> flowProps =
-      new HashMap<String, FlowProps>();
   private String flowId;
 
-  public ExecutableFlowBase(Project project, Node node, Flow flow,
-      ExecutableFlowBase parent) {
+  public ExecutableFlowBase(final Project project, final Node node, final Flow flow,
+      final ExecutableFlowBase parent) {
     super(node, parent);
 
     setFlow(project, flow);
@@ -104,35 +103,35 @@ public class ExecutableFlowBase extends ExecutableNode {
   }
 
   public Collection<FlowProps> getFlowProps() {
-    return flowProps.values();
+    return this.flowProps.values();
   }
 
   public String getFlowId() {
-    return flowId;
+    return this.flowId;
   }
 
-  protected void setFlow(Project project, Flow flow) {
+  protected void setFlow(final Project project, final Flow flow) {
     this.flowId = flow.getId();
-    flowProps.putAll(flow.getAllFlowProps());
+    this.flowProps.putAll(flow.getAllFlowProps());
 
-    for (Node node : flow.getNodes()) {
-      String id = node.getId();
+    for (final Node node : flow.getNodes()) {
+      final String id = node.getId();
       if (node.getType().equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
-        String embeddedFlowId = node.getEmbeddedFlowId();
-        Flow subFlow = project.getFlow(embeddedFlowId);
+        final String embeddedFlowId = node.getEmbeddedFlowId();
+        final Flow subFlow = project.getFlow(embeddedFlowId);
 
-        ExecutableFlowBase embeddedFlow =
+        final ExecutableFlowBase embeddedFlow =
             new ExecutableFlowBase(project, node, subFlow, this);
-        executableNodes.put(id, embeddedFlow);
+        this.executableNodes.put(id, embeddedFlow);
       } else {
-        ExecutableNode exNode = new ExecutableNode(node, this);
-        executableNodes.put(id, exNode);
+        final ExecutableNode exNode = new ExecutableNode(node, this);
+        this.executableNodes.put(id, exNode);
       }
     }
 
-    for (Edge edge : flow.getEdges()) {
-      ExecutableNode sourceNode = executableNodes.get(edge.getSourceId());
-      ExecutableNode targetNode = executableNodes.get(edge.getTargetId());
+    for (final Edge edge : flow.getEdges()) {
+      final ExecutableNode sourceNode = this.executableNodes.get(edge.getSourceId());
+      final ExecutableNode targetNode = this.executableNodes.get(edge.getTargetId());
 
       if (sourceNode == null) {
         System.out.println("Source node " + edge.getSourceId()
@@ -144,25 +143,25 @@ public class ExecutableFlowBase extends ExecutableNode {
   }
 
   public List<ExecutableNode> getExecutableNodes() {
-    return new ArrayList<ExecutableNode>(executableNodes.values());
+    return new ArrayList<>(this.executableNodes.values());
   }
 
-  public ExecutableNode getExecutableNode(String id) {
-    return executableNodes.get(id);
+  public ExecutableNode getExecutableNode(final String id) {
+    return this.executableNodes.get(id);
   }
 
-  public ExecutableNode getExecutableNodePath(String ids) {
-    String[] split = ids.split(":");
+  public ExecutableNode getExecutableNodePath(final String ids) {
+    final String[] split = ids.split(":");
     return getExecutableNodePath(split);
   }
 
-  public ExecutableNode getExecutableNodePath(String... ids) {
+  public ExecutableNode getExecutableNodePath(final String... ids) {
     return getExecutableNodePath(this, ids, 0);
   }
 
-  private ExecutableNode getExecutableNodePath(ExecutableFlowBase flow,
-      String[] ids, int currentIdIdx) {
-    ExecutableNode node = flow.getExecutableNode(ids[currentIdIdx]);
+  private ExecutableNode getExecutableNodePath(final ExecutableFlowBase flow,
+      final String[] ids, int currentIdIdx) {
+    final ExecutableNode node = flow.getExecutableNode(ids[currentIdIdx]);
     currentIdIdx++;
 
     if (node == null) {
@@ -180,57 +179,57 @@ public class ExecutableFlowBase extends ExecutableNode {
   }
 
   public List<String> getStartNodes() {
-    if (startNodes == null) {
-      startNodes = new ArrayList<String>();
-      for (ExecutableNode node : executableNodes.values()) {
+    if (this.startNodes == null) {
+      this.startNodes = new ArrayList<>();
+      for (final ExecutableNode node : this.executableNodes.values()) {
         if (node.getInNodes().isEmpty()) {
-          startNodes.add(node.getId());
+          this.startNodes.add(node.getId());
         }
       }
     }
 
-    return startNodes;
+    return this.startNodes;
   }
 
   public List<String> getEndNodes() {
-    if (endNodes == null) {
-      endNodes = new ArrayList<String>();
-      for (ExecutableNode node : executableNodes.values()) {
+    if (this.endNodes == null) {
+      this.endNodes = new ArrayList<>();
+      for (final ExecutableNode node : this.executableNodes.values()) {
         if (node.getOutNodes().isEmpty()) {
-          endNodes.add(node.getId());
+          this.endNodes.add(node.getId());
         }
       }
     }
 
-    return endNodes;
+    return this.endNodes;
   }
 
   @Override
   public Map<String, Object> toObject() {
-    Map<String, Object> mapObj = new HashMap<String, Object>();
+    final Map<String, Object> mapObj = new HashMap<>();
     fillMapFromExecutable(mapObj);
 
     return mapObj;
   }
 
   @Override
-  protected void fillMapFromExecutable(Map<String, Object> flowObjMap) {
+  protected void fillMapFromExecutable(final Map<String, Object> flowObjMap) {
     super.fillMapFromExecutable(flowObjMap);
 
-    flowObjMap.put(FLOW_ID_PARAM, flowId);
+    flowObjMap.put(FLOW_ID_PARAM, this.flowId);
 
-    ArrayList<Object> nodes = new ArrayList<Object>();
-    for (ExecutableNode node : executableNodes.values()) {
+    final ArrayList<Object> nodes = new ArrayList<>();
+    for (final ExecutableNode node : this.executableNodes.values()) {
       nodes.add(node.toObject());
     }
     flowObjMap.put(NODES_PARAM, nodes);
 
     // Flow properties
-    ArrayList<Object> props = new ArrayList<Object>();
-    for (FlowProps fprop : flowProps.values()) {
-      HashMap<String, Object> propObj = new HashMap<String, Object>();
-      String source = fprop.getSource();
-      String inheritedSource = fprop.getInheritedSource();
+    final ArrayList<Object> props = new ArrayList<>();
+    for (final FlowProps fprop : this.flowProps.values()) {
+      final HashMap<String, Object> propObj = new HashMap<>();
+      final String source = fprop.getSource();
+      final String inheritedSource = fprop.getInheritedSource();
 
       propObj.put(SOURCE_PARAM, source);
       if (inheritedSource != null) {
@@ -243,56 +242,54 @@ public class ExecutableFlowBase extends ExecutableNode {
 
   @Override
   public void fillExecutableFromMapObject(
-      TypedMapWrapper<String, Object> flowObjMap) {
+      final TypedMapWrapper<String, Object> flowObjMap) {
     super.fillExecutableFromMapObject(flowObjMap);
 
     this.flowId = flowObjMap.getString(FLOW_ID_PARAM);
-    List<Object> nodes = flowObjMap.<Object> getList(NODES_PARAM);
+    final List<Object> nodes = flowObjMap.<Object>getList(NODES_PARAM);
 
     if (nodes != null) {
-      for (Object nodeObj : nodes) {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> nodeObjMap = (Map<String, Object>) nodeObj;
-        TypedMapWrapper<String, Object> wrapper =
-            new TypedMapWrapper<String, Object>(nodeObjMap);
+      for (final Object nodeObj : nodes) {
+        final Map<String, Object> nodeObjMap = (Map<String, Object>) nodeObj;
+        final TypedMapWrapper<String, Object> wrapper =
+            new TypedMapWrapper<>(nodeObjMap);
 
-        String type = wrapper.getString(TYPE_PARAM);
+        final String type = wrapper.getString(TYPE_PARAM);
         if (type != null && type.equals(SpecialJobTypes.EMBEDDED_FLOW_TYPE)) {
-          ExecutableFlowBase exFlow = new ExecutableFlowBase();
+          final ExecutableFlowBase exFlow = new ExecutableFlowBase();
           exFlow.fillExecutableFromMapObject(wrapper);
           exFlow.setParentFlow(this);
 
-          executableNodes.put(exFlow.getId(), exFlow);
+          this.executableNodes.put(exFlow.getId(), exFlow);
         } else {
-          ExecutableNode exJob = new ExecutableNode();
+          final ExecutableNode exJob = new ExecutableNode();
           exJob.fillExecutableFromMapObject(nodeObjMap);
           exJob.setParentFlow(this);
 
-          executableNodes.put(exJob.getId(), exJob);
+          this.executableNodes.put(exJob.getId(), exJob);
         }
       }
     }
 
-    List<Object> properties = flowObjMap.<Object> getList(PROPERTIES_PARAM);
-    for (Object propNode : properties) {
-      @SuppressWarnings("unchecked")
-      HashMap<String, Object> fprop = (HashMap<String, Object>) propNode;
-      String source = (String) fprop.get("source");
-      String inheritedSource = (String) fprop.get("inherited");
+    final List<Object> properties = flowObjMap.<Object>getList(PROPERTIES_PARAM);
+    for (final Object propNode : properties) {
+      final HashMap<String, Object> fprop = (HashMap<String, Object>) propNode;
+      final String source = (String) fprop.get("source");
+      final String inheritedSource = (String) fprop.get("inherited");
 
-      FlowProps flowProps = new FlowProps(inheritedSource, source);
+      final FlowProps flowProps = new FlowProps(inheritedSource, source);
       this.flowProps.put(source, flowProps);
     }
   }
 
-  public Map<String, Object> toUpdateObject(long lastUpdateTime) {
-    Map<String, Object> updateData = super.toUpdateObject();
+  public Map<String, Object> toUpdateObject(final long lastUpdateTime) {
+    final Map<String, Object> updateData = super.toUpdateObject();
 
-    List<Map<String, Object>> updatedNodes =
-        new ArrayList<Map<String, Object>>();
-    for (ExecutableNode node : executableNodes.values()) {
+    final List<Map<String, Object>> updatedNodes =
+        new ArrayList<>();
+    for (final ExecutableNode node : this.executableNodes.values()) {
       if (node instanceof ExecutableFlowBase) {
-        Map<String, Object> updatedNodeMap =
+        final Map<String, Object> updatedNodeMap =
             ((ExecutableFlowBase) node).toUpdateObject(lastUpdateTime);
         // We add only flows to the list which either have a good update time,
         // or has updated descendants.
@@ -302,7 +299,7 @@ public class ExecutableFlowBase extends ExecutableNode {
         }
       } else {
         if (node.getUpdateTime() > lastUpdateTime) {
-          Map<String, Object> updatedNodeMap = node.toUpdateObject();
+          final Map<String, Object> updatedNodeMap = node.toUpdateObject();
           updatedNodes.add(updatedNodeMap);
         }
       }
@@ -319,28 +316,28 @@ public class ExecutableFlowBase extends ExecutableNode {
     return updateData;
   }
 
-  public void applyUpdateObject(TypedMapWrapper<String, Object> updateData,
-      List<ExecutableNode> updatedNodes) {
+  public void applyUpdateObject(final TypedMapWrapper<String, Object> updateData,
+      final List<ExecutableNode> updatedNodes) {
     super.applyUpdateObject(updateData);
 
     if (updatedNodes != null) {
       updatedNodes.add(this);
     }
 
-    List<Map<String, Object>> nodes =
+    final List<Map<String, Object>> nodes =
         (List<Map<String, Object>>) updateData
-        .<Map<String, Object>> getList(NODES_PARAM);
+            .<Map<String, Object>>getList(NODES_PARAM);
     if (nodes != null) {
-      for (Map<String, Object> node : nodes) {
-        TypedMapWrapper<String, Object> nodeWrapper =
-            new TypedMapWrapper<String, Object>(node);
+      for (final Map<String, Object> node : nodes) {
+        final TypedMapWrapper<String, Object> nodeWrapper =
+            new TypedMapWrapper<>(node);
         String id = nodeWrapper.getString(ID_PARAM);
         if (id == null) {
           // Legacy case
           id = nodeWrapper.getString("jobId");
         }
 
-        ExecutableNode exNode = executableNodes.get(id);
+        final ExecutableNode exNode = this.executableNodes.get(id);
         if (updatedNodes != null) {
           updatedNodes.add(exNode);
         }
@@ -355,24 +352,24 @@ public class ExecutableFlowBase extends ExecutableNode {
     }
   }
 
-  public void applyUpdateObject(Map<String, Object> updateData,
-      List<ExecutableNode> updatedNodes) {
-    TypedMapWrapper<String, Object> typedMapWrapper =
-        new TypedMapWrapper<String, Object>(updateData);
+  public void applyUpdateObject(final Map<String, Object> updateData,
+      final List<ExecutableNode> updatedNodes) {
+    final TypedMapWrapper<String, Object> typedMapWrapper =
+        new TypedMapWrapper<>(updateData);
     applyUpdateObject(typedMapWrapper, updatedNodes);
   }
 
   @Override
-  public void applyUpdateObject(Map<String, Object> updateData) {
-    TypedMapWrapper<String, Object> typedMapWrapper =
-        new TypedMapWrapper<String, Object>(updateData);
+  public void applyUpdateObject(final Map<String, Object> updateData) {
+    final TypedMapWrapper<String, Object> typedMapWrapper =
+        new TypedMapWrapper<>(updateData);
     applyUpdateObject(typedMapWrapper, null);
   }
 
-  public void reEnableDependents(ExecutableNode... nodes) {
-    for (ExecutableNode node : nodes) {
-      for (String dependent : node.getOutNodes()) {
-        ExecutableNode dependentNode = getExecutableNode(dependent);
+  public void reEnableDependents(final ExecutableNode... nodes) {
+    for (final ExecutableNode node : nodes) {
+      for (final String dependent : node.getOutNodes()) {
+        final ExecutableNode dependentNode = getExecutableNode(dependent);
 
         if (dependentNode.getStatus() == Status.KILLED) {
           dependentNode.setStatus(Status.READY);
@@ -394,12 +391,10 @@ public class ExecutableFlowBase extends ExecutableNode {
 
   /**
   * Returns true only if every end node of this flow has reached a finished status.
-   *
-   * @return
    */
   public boolean isFlowFinished() {
-    for (String end : getEndNodes()) {
-      ExecutableNode node = getExecutableNode(end);
+    for (final String end : getEndNodes()) {
+      final ExecutableNode node = getExecutableNode(end);
       if (!Status.isStatusFinished(node.getStatus())) {
         return false;
       }
@@ -414,17 +409,15 @@ public class ExecutableFlowBase extends ExecutableNode {
    *
   * It will also return any subflows that have been completed so that the
   * FlowRunner can properly handle them.
-   *
-   * @param flow
-   * @return
    */
   public List<ExecutableNode> findNextJobsToRun() {
-    ArrayList<ExecutableNode> jobsToRun = new ArrayList<ExecutableNode>();
+    final ArrayList<ExecutableNode> jobsToRun = new ArrayList<>();
 
     if (isFlowFinished() && !Status.isStatusFinished(getStatus())) {
       jobsToRun.add(this);
     } else {
-      nodeloop: for (ExecutableNode node : executableNodes.values()) {
+      nodeloop:
+      for (final ExecutableNode node : this.executableNodes.values()) {
         if (Status.isStatusFinished(node.getStatus())) {
           continue;
         }
@@ -436,7 +429,7 @@ public class ExecutableFlowBase extends ExecutableNode {
         } else if (Status.isStatusRunning(node.getStatus())) {
           continue;
         } else {
-          for (String dependency : node.getInNodes()) {
+          for (final String dependency : node.getInNodes()) {
             // We find that the outer-loop is unfinished.
             if (!Status.isStatusFinished(getExecutableNode(dependency)
                 .getStatus())) {
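
For reference, a minimal usage sketch of the ExecutableFlowBase update API reformatted above (illustrative only, not part of this diff; the two flow instances and the surrounding FlowRunner wiring are assumed to already exist):

import azkaban.executor.ExecutableFlowBase;
import azkaban.executor.ExecutableNode;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class FlowUpdateSketch {
  // Pushes the delta from one copy of a flow to another and lists runnable jobs.
  static void sync(final ExecutableFlowBase source, final ExecutableFlowBase target,
      final long lastUpdateTime) {
    // Only nodes updated after lastUpdateTime (or with updated descendants) are included.
    final Map<String, Object> update = source.toUpdateObject(lastUpdateTime);

    // Apply the delta to the other copy and collect the nodes that were touched.
    final List<ExecutableNode> updatedNodes = new ArrayList<>();
    target.applyUpdateObject(update, updatedNodes);

    // Once all in-nodes of a job are finished, the flow reports it as ready to run.
    for (final ExecutableNode node : target.findNextJobsToRun()) {
      System.out.println("ready: " + node.getId());
    }
  }
}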
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowPriorityComparator.java b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowPriorityComparator.java
index 3050a8d..90c8d25 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowPriorityComparator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutableFlowPriorityComparator.java
@@ -16,19 +16,18 @@
 
 package azkaban.executor;
 
+import azkaban.utils.Pair;
 import java.util.Comparator;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Pair;
-
 /**
  * Comparator implicitly used in priority queue for QueuedExecutions.
  */
 public final class ExecutableFlowPriorityComparator implements
-  Comparator<Pair<ExecutionReference, ExecutableFlow>> {
-  private static Logger logger = Logger
-    .getLogger(ExecutableFlowPriorityComparator.class);
+    Comparator<Pair<ExecutionReference, ExecutableFlow>> {
+
+  private static final Logger logger = Logger
+      .getLogger(ExecutableFlowPriorityComparator.class);
 
   /**
    * <pre>
@@ -43,8 +42,8 @@ public final class ExecutableFlowPriorityComparator implements
    * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
    */
   @Override
-  public int compare(Pair<ExecutionReference, ExecutableFlow> pair1,
-    Pair<ExecutionReference, ExecutableFlow> pair2) {
+  public int compare(final Pair<ExecutionReference, ExecutableFlow> pair1,
+      final Pair<ExecutionReference, ExecutableFlow> pair2) {
     ExecutableFlow exflow1 = null, exflow2 = null;
     if (pair1 != null && pair1.getSecond() != null) {
       exflow1 = pair1.getSecond();
@@ -52,13 +51,13 @@ public final class ExecutableFlowPriorityComparator implements
     if (pair2 != null && pair2.getSecond() != null) {
       exflow2 = pair2.getSecond();
     }
-    if (exflow1 == null && exflow2 == null)
+    if (exflow1 == null && exflow2 == null) {
       return 0;
-    else if (exflow1 == null)
+    } else if (exflow1 == null) {
       return -1;
-    else if (exflow2 == null)
+    } else if (exflow2 == null) {
       return 1;
-    else {
+    } else {
       // descending order of priority
       int diff = getPriority(exflow2) - getPriority(exflow1);
       if (diff == 0) {
@@ -74,22 +73,22 @@ public final class ExecutableFlowPriorityComparator implements
   }
 
   /* Helper method to fetch flow priority from flow props */
-  private int getPriority(ExecutableFlow exflow) {
-    ExecutionOptions options = exflow.getExecutionOptions();
+  private int getPriority(final ExecutableFlow exflow) {
+    final ExecutionOptions options = exflow.getExecutionOptions();
     int priority = ExecutionOptions.DEFAULT_FLOW_PRIORITY;
     if (options != null
-      && options.getFlowParameters() != null
-      && options.getFlowParameters()
+        && options.getFlowParameters() != null
+        && options.getFlowParameters()
         .containsKey(ExecutionOptions.FLOW_PRIORITY)) {
       try {
         priority =
-          Integer.valueOf(options.getFlowParameters().get(
-            ExecutionOptions.FLOW_PRIORITY));
-      } catch (NumberFormatException ex) {
+            Integer.valueOf(options.getFlowParameters().get(
+                ExecutionOptions.FLOW_PRIORITY));
+      } catch (final NumberFormatException ex) {
         priority = ExecutionOptions.DEFAULT_FLOW_PRIORITY;
         logger.error(
-          "Failed to parse flow priority for exec_id = "
-            + exflow.getExecutionId(), ex);
+            "Failed to parse flow priority for exec_id = "
+                + exflow.getExecutionId(), ex);
       }
     }
     return priority;
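
As its javadoc notes, this comparator is meant to order the queue of submitted executions. A minimal sketch (not part of this diff; the queued pairs are assumed to come from elsewhere):

import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableFlowPriorityComparator;
import azkaban.executor.ExecutionReference;
import azkaban.utils.Pair;
import java.util.concurrent.PriorityBlockingQueue;

public class PriorityQueueSketch {
  // Returns the queued execution with the highest flow priority.
  static Pair<ExecutionReference, ExecutableFlow> pollHighestPriority(
      final Iterable<Pair<ExecutionReference, ExecutableFlow>> queued) {
    final PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
        new PriorityBlockingQueue<>(11, new ExecutableFlowPriorityComparator());
    for (final Pair<ExecutionReference, ExecutableFlow> pair : queued) {
      queue.offer(pair);
    }
    // Flows without a parsable ExecutionOptions.FLOW_PRIORITY flow parameter
    // fall back to ExecutionOptions.DEFAULT_FLOW_PRIORITY, as getPriority() shows.
    return queue.poll();
  }
}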
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutableJobInfo.java b/azkaban-common/src/main/java/azkaban/executor/ExecutableJobInfo.java
index 94389ae..d3e98a7 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutableJobInfo.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutableJobInfo.java
@@ -16,14 +16,14 @@
 
 package azkaban.executor;
 
+import azkaban.utils.Pair;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import azkaban.utils.Pair;
-
 public class ExecutableJobInfo {
+
   private final int execId;
   private final int projectId;
   private final int version;
@@ -36,9 +36,10 @@ public class ExecutableJobInfo {
 
   private ArrayList<Pair<String, String>> jobPath;
 
-  public ExecutableJobInfo(int execId, int projectId, int version,
-      String flowId, String jobId, long startTime, long endTime, Status status,
-      int attempt) {
+  public ExecutableJobInfo(final int execId, final int projectId, final int version,
+      final String flowId, final String jobId, final long startTime, final long endTime,
+      final Status status,
+      final int attempt) {
     this.execId = execId;
     this.projectId = projectId;
     this.startTime = startTime;
@@ -53,98 +54,98 @@ public class ExecutableJobInfo {
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
   public int getExecId() {
-    return execId;
+    return this.execId;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
   public String getFlowId() {
-    return flowId;
+    return this.flowId;
   }
 
   public String getImmediateFlowId() {
-    if (jobPath.size() == 1) {
-      return flowId;
+    if (this.jobPath.size() == 1) {
+      return this.flowId;
     }
-    Pair<String, String> pair = jobPath.get(jobPath.size() - 1);
+    final Pair<String, String> pair = this.jobPath.get(this.jobPath.size() - 1);
     return pair.getSecond();
   }
 
   public String getHeadFlowId() {
-    Pair<String, String> pair = jobPath.get(0);
+    final Pair<String, String> pair = this.jobPath.get(0);
 
     return pair.getFirst();
   }
 
   public String getJobId() {
-    return jobId;
+    return this.jobId;
   }
 
   public long getStartTime() {
-    return startTime;
+    return this.startTime;
   }
 
   public long getEndTime() {
-    return endTime;
+    return this.endTime;
   }
 
   public Status getStatus() {
-    return status;
+    return this.status;
   }
 
   public int getAttempt() {
-    return attempt;
+    return this.attempt;
   }
 
   public List<Pair<String, String>> getParsedFlowId() {
-    return jobPath;
+    return this.jobPath;
   }
 
   private void parseFlowId() {
-    jobPath = new ArrayList<Pair<String, String>>();
-    String[] flowPairs = flowId.split(",");
+    this.jobPath = new ArrayList<>();
+    final String[] flowPairs = this.flowId.split(",");
 
-    for (String flowPair : flowPairs) {
-      String[] pairSplit = flowPair.split(":");
-      Pair<String, String> pair;
+    for (final String flowPair : flowPairs) {
+      final String[] pairSplit = flowPair.split(":");
+      final Pair<String, String> pair;
       if (pairSplit.length == 1) {
-        pair = new Pair<String, String>(pairSplit[0], pairSplit[0]);
+        pair = new Pair<>(pairSplit[0], pairSplit[0]);
       } else {
-        pair = new Pair<String, String>(pairSplit[0], pairSplit[1]);
+        pair = new Pair<>(pairSplit[0], pairSplit[1]);
       }
 
-      jobPath.add(pair);
+      this.jobPath.add(pair);
     }
   }
 
   public String getJobIdPath() {
     // Skip the first one because it's always just the root.
     String path = "";
-    for (int i = 1; i < jobPath.size(); ++i) {
-      Pair<String, String> pair = jobPath.get(i);
+    for (int i = 1; i < this.jobPath.size(); ++i) {
+      final Pair<String, String> pair = this.jobPath.get(i);
       path += pair.getFirst() + ":";
     }
 
-    path += jobId;
+    path += this.jobId;
     return path;
   }
 
   public Map<String, Object> toObject() {
-    HashMap<String, Object> map = new HashMap<String, Object>();
-    map.put("execId", execId);
-    map.put("version", version);
-    map.put("flowId", flowId);
-    map.put("jobId", jobId);
-    map.put("startTime", startTime);
-    map.put("endTime", endTime);
-    map.put("status", status.toString());
-    map.put("attempt", attempt);
+    final HashMap<String, Object> map = new HashMap<>();
+    map.put("execId", this.execId);
+    map.put("version", this.version);
+    map.put("flowId", this.flowId);
+    map.put("jobId", this.jobId);
+    map.put("startTime", this.startTime);
+    map.put("endTime", this.endTime);
+    map.put("status", this.status.toString());
+    map.put("attempt", this.attempt);
 
     return map;
   }
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutableNode.java b/azkaban-common/src/main/java/azkaban/executor/ExecutableNode.java
index b4cab0f..34f99bb 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutableNode.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutableNode.java
@@ -16,6 +16,10 @@
 
 package azkaban.executor;
 
+import azkaban.flow.Node;
+import azkaban.utils.Props;
+import azkaban.utils.PropsUtils;
+import azkaban.utils.TypedMapWrapper;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -25,15 +29,11 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import azkaban.flow.Node;
-import azkaban.utils.Props;
-import azkaban.utils.PropsUtils;
-import azkaban.utils.TypedMapWrapper;
-
 /**
  * Base Executable that nodes and flows are based.
  */
 public class ExecutableNode {
+
   public static final String ID_PARAM = "id";
   public static final String STATUS_PARAM = "status";
   public static final String STARTTIME_PARAM = "startTime";
@@ -45,27 +45,22 @@ public class ExecutableNode {
   public static final String PROPS_SOURCE_PARAM = "propSource";
   public static final String JOB_SOURCE_PARAM = "jobSource";
   public static final String OUTPUT_PROPS_PARAM = "outputProps";
-
+  public static final String ATTEMPT_PARAM = "attempt";
+  public static final String PASTATTEMPTS_PARAM = "pastAttempts";
   private String id;
   private String type = null;
   private Status status = Status.READY;
   private long startTime = -1;
   private long endTime = -1;
   private long updateTime = -1;
-
   // Path to Job File
   private String jobSource;
   // Path to top level props file
   private String propsSource;
-  private Set<String> inNodes = new HashSet<String>();
-  private Set<String> outNodes = new HashSet<String>();
-
+  private Set<String> inNodes = new HashSet<>();
+  private Set<String> outNodes = new HashSet<>();
   private Props inputProps;
   private Props outputProps;
-
-  public static final String ATTEMPT_PARAM = "attempt";
-  public static final String PASTATTEMPTS_PARAM = "pastAttempts";
-
   private int attempt = 0;
   private long delayExecution = 0;
   private ArrayList<ExecutionAttempt> pastAttempts = null;
@@ -73,19 +68,19 @@ public class ExecutableNode {
   // Transient. These values aren't saved, but rediscovered.
   private ExecutableFlowBase parentFlow;
 
-  public ExecutableNode(Node node) {
+  public ExecutableNode(final Node node) {
     this.id = node.getId();
     this.jobSource = node.getJobSource();
     this.propsSource = node.getPropsSource();
   }
 
-  public ExecutableNode(Node node, ExecutableFlowBase parent) {
+  public ExecutableNode(final Node node, final ExecutableFlowBase parent) {
     this(node.getId(), node.getType(), node.getJobSource(), node
         .getPropsSource(), parent);
   }
 
-  public ExecutableNode(String id, String type, String jobSource,
-      String propsSource, ExecutableFlowBase parent) {
+  public ExecutableNode(final String id, final String type, final String jobSource,
+      final String propsSource, final ExecutableFlowBase parent) {
     this.id = id;
     this.jobSource = jobSource;
     this.propsSource = propsSource;
@@ -97,147 +92,147 @@ public class ExecutableNode {
   }
 
   public ExecutableFlow getExecutableFlow() {
-    if (parentFlow == null) {
+    if (this.parentFlow == null) {
       return null;
     }
 
-    return parentFlow.getExecutableFlow();
+    return this.parentFlow.getExecutableFlow();
   }
 
-  public void setParentFlow(ExecutableFlowBase flow) {
-    this.parentFlow = flow;
+  public ExecutableFlowBase getParentFlow() {
+    return this.parentFlow;
   }
 
-  public ExecutableFlowBase getParentFlow() {
-    return parentFlow;
+  public void setParentFlow(final ExecutableFlowBase flow) {
+    this.parentFlow = flow;
   }
 
   public String getId() {
-    return id;
+    return this.id;
   }
 
-  public void setId(String id) {
+  public void setId(final String id) {
     this.id = id;
   }
 
   public Status getStatus() {
-    return status;
+    return this.status;
   }
 
-  public String getType() {
-    return type;
+  public void setStatus(final Status status) {
+    this.status = status;
   }
 
-  public void setType(String type) {
-    this.type = type;
+  public String getType() {
+    return this.type;
   }
 
-  public void setStatus(Status status) {
-    this.status = status;
+  public void setType(final String type) {
+    this.type = type;
   }
 
   public long getStartTime() {
-    return startTime;
+    return this.startTime;
   }
 
-  public void setStartTime(long startTime) {
+  public void setStartTime(final long startTime) {
     this.startTime = startTime;
   }
 
   public long getEndTime() {
-    return endTime;
+    return this.endTime;
   }
 
-  public void setEndTime(long endTime) {
+  public void setEndTime(final long endTime) {
     this.endTime = endTime;
   }
 
   public long getUpdateTime() {
-    return updateTime;
+    return this.updateTime;
   }
 
-  public void setUpdateTime(long updateTime) {
+  public void setUpdateTime(final long updateTime) {
     this.updateTime = updateTime;
   }
 
-  public void addOutNode(String exNode) {
-    outNodes.add(exNode);
+  public void addOutNode(final String exNode) {
+    this.outNodes.add(exNode);
   }
 
-  public void addInNode(String exNode) {
-    inNodes.add(exNode);
+  public void addInNode(final String exNode) {
+    this.inNodes.add(exNode);
   }
 
   public Set<String> getOutNodes() {
-    return outNodes;
+    return this.outNodes;
   }
 
   public Set<String> getInNodes() {
-    return inNodes;
+    return this.inNodes;
   }
 
   public boolean hasJobSource() {
-    return jobSource != null;
+    return this.jobSource != null;
   }
 
   public boolean hasPropsSource() {
-    return propsSource != null;
+    return this.propsSource != null;
   }
 
   public String getJobSource() {
-    return jobSource;
+    return this.jobSource;
   }
 
   public String getPropsSource() {
-    return propsSource;
+    return this.propsSource;
   }
 
-  public void setInputProps(Props input) {
-    this.inputProps = input;
+  public Props getInputProps() {
+    return this.inputProps;
   }
 
-  public void setOutputProps(Props output) {
-    this.outputProps = output;
+  public void setInputProps(final Props input) {
+    this.inputProps = input;
   }
 
-  public Props getInputProps() {
-    return this.inputProps;
+  public Props getOutputProps() {
+    return this.outputProps;
   }
 
-  public Props getOutputProps() {
-    return outputProps;
+  public void setOutputProps(final Props output) {
+    this.outputProps = output;
   }
 
   public long getDelayedExecution() {
-    return delayExecution;
+    return this.delayExecution;
   }
 
-  public void setDelayedExecution(long delayMs) {
-    delayExecution = delayMs;
+  public void setDelayedExecution(final long delayMs) {
+    this.delayExecution = delayMs;
   }
 
   public List<ExecutionAttempt> getPastAttemptList() {
-    return pastAttempts;
+    return this.pastAttempts;
   }
 
   public int getAttempt() {
-    return attempt;
+    return this.attempt;
   }
 
-  public void setAttempt(int attempt) {
+  public void setAttempt(final int attempt) {
     this.attempt = attempt;
   }
 
   public void resetForRetry() {
-    ExecutionAttempt pastAttempt = new ExecutionAttempt(attempt, this);
-    attempt++;
+    final ExecutionAttempt pastAttempt = new ExecutionAttempt(this.attempt, this);
+    this.attempt++;
 
     synchronized (this) {
-      if (pastAttempts == null) {
-        pastAttempts = new ArrayList<ExecutionAttempt>();
+      if (this.pastAttempts == null) {
+        this.pastAttempts = new ArrayList<>();
       }
 
-      pastAttempts.add(pastAttempt);
+      this.pastAttempts.add(pastAttempt);
     }
 
     this.setStartTime(-1);
@@ -247,9 +242,9 @@ public class ExecutableNode {
   }
 
   public List<Object> getAttemptObjects() {
-    ArrayList<Object> array = new ArrayList<Object>();
+    final ArrayList<Object> array = new ArrayList<>();
 
-    for (ExecutionAttempt attempt : pastAttempts) {
+    for (final ExecutionAttempt attempt : this.pastAttempts) {
       array.add(attempt.toObject());
     }
 
@@ -260,7 +255,7 @@ public class ExecutableNode {
     return getPrintableId(":");
   }
 
-  public String getPrintableId(String delimiter) {
+  public String getPrintableId(final String delimiter) {
     if (this.getParentFlow() == null
         || this.getParentFlow() instanceof ExecutableFlow) {
       return getId();
@@ -269,26 +264,26 @@ public class ExecutableNode {
   }
 
   public Map<String, Object> toObject() {
-    Map<String, Object> mapObj = new HashMap<String, Object>();
+    final Map<String, Object> mapObj = new HashMap<>();
     fillMapFromExecutable(mapObj);
 
     return mapObj;
   }
 
-  protected void fillMapFromExecutable(Map<String, Object> objMap) {
+  protected void fillMapFromExecutable(final Map<String, Object> objMap) {
     objMap.put(ID_PARAM, this.id);
-    objMap.put(STATUS_PARAM, status.toString());
-    objMap.put(STARTTIME_PARAM, startTime);
-    objMap.put(ENDTIME_PARAM, endTime);
-    objMap.put(UPDATETIME_PARAM, updateTime);
-    objMap.put(TYPE_PARAM, type);
-    objMap.put(ATTEMPT_PARAM, attempt);
-
-    if (inNodes != null && !inNodes.isEmpty()) {
-      objMap.put(INNODES_PARAM, inNodes);
+    objMap.put(STATUS_PARAM, this.status.toString());
+    objMap.put(STARTTIME_PARAM, this.startTime);
+    objMap.put(ENDTIME_PARAM, this.endTime);
+    objMap.put(UPDATETIME_PARAM, this.updateTime);
+    objMap.put(TYPE_PARAM, this.type);
+    objMap.put(ATTEMPT_PARAM, this.attempt);
+
+    if (this.inNodes != null && !this.inNodes.isEmpty()) {
+      objMap.put(INNODES_PARAM, this.inNodes);
     }
-    if (outNodes != null && !outNodes.isEmpty()) {
-      objMap.put(OUTNODES_PARAM, outNodes);
+    if (this.outNodes != null && !this.outNodes.isEmpty()) {
+      objMap.put(OUTNODES_PARAM, this.outNodes);
     }
 
     if (hasPropsSource()) {
@@ -298,23 +293,22 @@ public class ExecutableNode {
       objMap.put(JOB_SOURCE_PARAM, this.jobSource);
     }
 
-    if (outputProps != null && outputProps.size() > 0) {
-      objMap.put(OUTPUT_PROPS_PARAM, PropsUtils.toStringMap(outputProps, true));
+    if (this.outputProps != null && this.outputProps.size() > 0) {
+      objMap.put(OUTPUT_PROPS_PARAM, PropsUtils.toStringMap(this.outputProps, true));
     }
 
-    if (pastAttempts != null) {
-      ArrayList<Object> attemptsList =
-          new ArrayList<Object>(pastAttempts.size());
-      for (ExecutionAttempt attempts : pastAttempts) {
+    if (this.pastAttempts != null) {
+      final ArrayList<Object> attemptsList =
+          new ArrayList<>(this.pastAttempts.size());
+      for (final ExecutionAttempt attempts : this.pastAttempts) {
         attemptsList.add(attempts.toObject());
       }
       objMap.put(PASTATTEMPTS_PARAM, attemptsList);
     }
   }
 
-  @SuppressWarnings("unchecked")
   public void fillExecutableFromMapObject(
-      TypedMapWrapper<String, Object> wrappedMap) {
+      final TypedMapWrapper<String, Object> wrappedMap) {
     this.id = wrappedMap.getString(ID_PARAM);
     this.type = wrappedMap.getString(TYPE_PARAM);
     this.status = Status.valueOf(wrappedMap.getString(STATUS_PARAM));
@@ -323,29 +317,29 @@ public class ExecutableNode {
     this.updateTime = wrappedMap.getLong(UPDATETIME_PARAM);
     this.attempt = wrappedMap.getInt(ATTEMPT_PARAM, 0);
 
-    this.inNodes = new HashSet<String>();
+    this.inNodes = new HashSet<>();
     this.inNodes.addAll(wrappedMap.getStringCollection(INNODES_PARAM,
-        Collections.<String> emptySet()));
+        Collections.<String>emptySet()));
 
-    this.outNodes = new HashSet<String>();
+    this.outNodes = new HashSet<>();
     this.outNodes.addAll(wrappedMap.getStringCollection(OUTNODES_PARAM,
-        Collections.<String> emptySet()));
+        Collections.<String>emptySet()));
 
     this.propsSource = wrappedMap.getString(PROPS_SOURCE_PARAM);
     this.jobSource = wrappedMap.getString(JOB_SOURCE_PARAM);
 
-    Map<String, String> outputProps =
-        wrappedMap.<String, String> getMap(OUTPUT_PROPS_PARAM);
+    final Map<String, String> outputProps =
+        wrappedMap.<String, String>getMap(OUTPUT_PROPS_PARAM);
     if (outputProps != null) {
       this.outputProps = new Props(null, outputProps);
     }
 
-    Collection<Object> pastAttempts =
-        wrappedMap.<Object> getCollection(PASTATTEMPTS_PARAM);
+    final Collection<Object> pastAttempts =
+        wrappedMap.<Object>getCollection(PASTATTEMPTS_PARAM);
     if (pastAttempts != null) {
-      ArrayList<ExecutionAttempt> attempts = new ArrayList<ExecutionAttempt>();
-      for (Object attemptObj : pastAttempts) {
-        ExecutionAttempt attempt = ExecutionAttempt.fromObject(attemptObj);
+      final ArrayList<ExecutionAttempt> attempts = new ArrayList<>();
+      for (final Object attemptObj : pastAttempts) {
+        final ExecutionAttempt attempt = ExecutionAttempt.fromObject(attemptObj);
         attempts.add(attempt);
       }
 
@@ -353,14 +347,14 @@ public class ExecutableNode {
     }
   }
 
-  public void fillExecutableFromMapObject(Map<String, Object> objMap) {
-    TypedMapWrapper<String, Object> wrapper =
-        new TypedMapWrapper<String, Object>(objMap);
+  public void fillExecutableFromMapObject(final Map<String, Object> objMap) {
+    final TypedMapWrapper<String, Object> wrapper =
+        new TypedMapWrapper<>(objMap);
     fillExecutableFromMapObject(wrapper);
   }
 
   public Map<String, Object> toUpdateObject() {
-    Map<String, Object> updatedNodeMap = new HashMap<String, Object>();
+    final Map<String, Object> updatedNodeMap = new HashMap<>();
     updatedNodeMap.put(ID_PARAM, getId());
     updatedNodeMap.put(STATUS_PARAM, getStatus().getNumVal());
     updatedNodeMap.put(STARTTIME_PARAM, getStartTime());
@@ -370,9 +364,9 @@ public class ExecutableNode {
     updatedNodeMap.put(ATTEMPT_PARAM, getAttempt());
 
     if (getAttempt() > 0) {
-      ArrayList<Map<String, Object>> pastAttempts =
-          new ArrayList<Map<String, Object>>();
-      for (ExecutionAttempt attempt : getPastAttemptList()) {
+      final ArrayList<Map<String, Object>> pastAttempts =
+          new ArrayList<>();
+      for (final ExecutionAttempt attempt : getPastAttemptList()) {
         pastAttempts.add(attempt.toObject());
       }
       updatedNodeMap.put(PASTATTEMPTS_PARAM, pastAttempts);
@@ -381,7 +375,7 @@ public class ExecutableNode {
     return updatedNodeMap;
   }
 
-  public void applyUpdateObject(TypedMapWrapper<String, Object> updateData) {
+  public void applyUpdateObject(final TypedMapWrapper<String, Object> updateData) {
     this.status =
         Status.fromInteger(updateData.getInt(STATUS_PARAM,
             this.status.getNumVal()));
@@ -390,21 +384,21 @@ public class ExecutableNode {
     this.endTime = updateData.getLong(ENDTIME_PARAM);
 
     if (updateData.containsKey(ATTEMPT_PARAM)) {
-      attempt = updateData.getInt(ATTEMPT_PARAM);
-      if (attempt > 0) {
-        updatePastAttempts(updateData.<Object> getList(PASTATTEMPTS_PARAM,
-            Collections.<Object> emptyList()));
+      this.attempt = updateData.getInt(ATTEMPT_PARAM);
+      if (this.attempt > 0) {
+        updatePastAttempts(updateData.<Object>getList(PASTATTEMPTS_PARAM,
+            Collections.<Object>emptyList()));
       }
     }
   }
 
-  public void applyUpdateObject(Map<String, Object> updateData) {
-    TypedMapWrapper<String, Object> wrapper =
-        new TypedMapWrapper<String, Object>(updateData);
+  public void applyUpdateObject(final Map<String, Object> updateData) {
+    final TypedMapWrapper<String, Object> wrapper =
+        new TypedMapWrapper<>(updateData);
     applyUpdateObject(wrapper);
   }
 
-  public void cancelNode(long cancelTime) {
+  public void cancelNode(final long cancelTime) {
     if (this.status == Status.DISABLED) {
       skipNode(cancelTime);
     } else {
@@ -415,21 +409,21 @@ public class ExecutableNode {
     }
   }
 
-  public void skipNode(long skipTime) {
+  public void skipNode(final long skipTime) {
     this.setStatus(Status.SKIPPED);
     this.setStartTime(skipTime);
     this.setEndTime(skipTime);
     this.setUpdateTime(skipTime);
   }
 
-  private void updatePastAttempts(List<Object> pastAttemptsList) {
+  private void updatePastAttempts(final List<Object> pastAttemptsList) {
     if (pastAttemptsList == null) {
       return;
     }
 
     synchronized (this) {
       if (this.pastAttempts == null) {
-        this.pastAttempts = new ArrayList<ExecutionAttempt>();
+        this.pastAttempts = new ArrayList<>();
       }
 
       // We just check size because past attempts don't change
@@ -437,9 +431,9 @@ public class ExecutableNode {
         return;
       }
 
-      Object[] pastAttemptArray = pastAttemptsList.toArray();
+      final Object[] pastAttemptArray = pastAttemptsList.toArray();
       for (int i = this.pastAttempts.size(); i < pastAttemptArray.length; ++i) {
-        ExecutionAttempt attempt =
+        final ExecutionAttempt attempt =
             ExecutionAttempt.fromObject(pastAttemptArray[i]);
         this.pastAttempts.add(attempt);
       }
@@ -447,10 +441,10 @@ public class ExecutableNode {
   }
 
   public int getRetries() {
-    return inputProps.getInt("retries", 0);
+    return this.inputProps.getInt("retries", 0);
   }
 
   public long getRetryBackoff() {
-    return inputProps.getLong("retry.backoff", 0);
+    return this.inputProps.getLong("retry.backoff", 0);
   }
 }
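
A minimal sketch of the ExecutableNode map round trip and retry bookkeeping (not part of this diff; the constructor arguments are illustrative and it assumes the five-argument constructor tolerates a null parent flow):

import azkaban.executor.ExecutableNode;
import azkaban.executor.Status;
import java.util.Map;

public class NodeRoundTripSketch {
  static void demo() {
    // A standalone node; "command" is just a sample job type.
    final ExecutableNode node = new ExecutableNode(
        "myJob", "command", "myJob.job", "myJob.properties", null /* parent flow */);
    node.setStatus(Status.READY);

    // Serialize to a plain map and rebuild a second copy from it.
    final Map<String, Object> asMap = node.toObject();
    final ExecutableNode copy = new ExecutableNode("tmp", null, null, null, null);
    copy.fillExecutableFromMapObject(asMap);

    // resetForRetry() archives the current run as an ExecutionAttempt and bumps the counter.
    copy.resetForRetry();
    copy.getAttempt();          // 1
    copy.getPastAttemptList();  // one archived ExecutionAttempt
  }
}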
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutionAttempt.java b/azkaban-common/src/main/java/azkaban/executor/ExecutionAttempt.java
index 1ca05d2..7c6394d 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutionAttempt.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutionAttempt.java
@@ -16,72 +16,70 @@
 
 package azkaban.executor;
 
+import azkaban.utils.TypedMapWrapper;
 import java.util.HashMap;
 import java.util.Map;
 
-import azkaban.utils.TypedMapWrapper;
-
 public class ExecutionAttempt {
+
   public static final String ATTEMPT_PARAM = "attempt";
   public static final String STATUS_PARAM = "status";
   public static final String STARTTIME_PARAM = "startTime";
   public static final String ENDTIME_PARAM = "endTime";
-
+  private final Status status;
   private int attempt = 0;
   private long startTime = -1;
   private long endTime = -1;
-  private Status status;
 
-  public ExecutionAttempt(int attempt, ExecutableNode executable) {
+  public ExecutionAttempt(final int attempt, final ExecutableNode executable) {
     this.attempt = attempt;
     this.startTime = executable.getStartTime();
     this.endTime = executable.getEndTime();
     this.status = executable.getStatus();
   }
 
-  public ExecutionAttempt(int attempt, long startTime, long endTime,
-      Status status) {
+  public ExecutionAttempt(final int attempt, final long startTime, final long endTime,
+      final Status status) {
     this.attempt = attempt;
     this.startTime = startTime;
     this.endTime = endTime;
     this.status = status;
   }
 
+  public static ExecutionAttempt fromObject(final Object obj) {
+    final Map<String, Object> map = (Map<String, Object>) obj;
+    final TypedMapWrapper<String, Object> wrapper =
+        new TypedMapWrapper<>(map);
+    final int attempt = wrapper.getInt(ATTEMPT_PARAM);
+    final long startTime = wrapper.getLong(STARTTIME_PARAM);
+    final long endTime = wrapper.getLong(ENDTIME_PARAM);
+    final Status status = Status.valueOf(wrapper.getString(STATUS_PARAM));
+
+    return new ExecutionAttempt(attempt, startTime, endTime, status);
+  }
+
   public long getStartTime() {
-    return startTime;
+    return this.startTime;
   }
 
   public long getEndTime() {
-    return endTime;
+    return this.endTime;
   }
 
   public Status getStatus() {
-    return status;
+    return this.status;
   }
 
   public int getAttempt() {
-    return attempt;
-  }
-
-  public static ExecutionAttempt fromObject(Object obj) {
-    @SuppressWarnings("unchecked")
-    Map<String, Object> map = (Map<String, Object>) obj;
-    TypedMapWrapper<String, Object> wrapper =
-        new TypedMapWrapper<String, Object>(map);
-    int attempt = wrapper.getInt(ATTEMPT_PARAM);
-    long startTime = wrapper.getLong(STARTTIME_PARAM);
-    long endTime = wrapper.getLong(ENDTIME_PARAM);
-    Status status = Status.valueOf(wrapper.getString(STATUS_PARAM));
-
-    return new ExecutionAttempt(attempt, startTime, endTime, status);
+    return this.attempt;
   }
 
   public Map<String, Object> toObject() {
-    HashMap<String, Object> attempts = new HashMap<String, Object>();
-    attempts.put(ATTEMPT_PARAM, attempt);
-    attempts.put(STARTTIME_PARAM, startTime);
-    attempts.put(ENDTIME_PARAM, endTime);
-    attempts.put(STATUS_PARAM, status.toString());
+    final HashMap<String, Object> attempts = new HashMap<>();
+    attempts.put(ATTEMPT_PARAM, this.attempt);
+    attempts.put(STARTTIME_PARAM, this.startTime);
+    attempts.put(ENDTIME_PARAM, this.endTime);
+    attempts.put(STATUS_PARAM, this.status.toString());
     return attempts;
   }
 }
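
A minimal round-trip sketch for ExecutionAttempt (not part of this diff; the timestamps are arbitrary):

import azkaban.executor.ExecutionAttempt;
import azkaban.executor.Status;
import java.util.Map;

public class AttemptRoundTripSketch {
  static void demo() {
    final ExecutionAttempt attempt =
        new ExecutionAttempt(0 /* attempt */, 1000L /* startTime */, 2000L /* endTime */,
            Status.SKIPPED);

    // toObject() emits the attempt/startTime/endTime/status keys read back by fromObject().
    final Map<String, Object> asMap = attempt.toObject();
    final ExecutionAttempt restored = ExecutionAttempt.fromObject(asMap);

    restored.getAttempt(); // 0
    restored.getStatus();  // Status.SKIPPED
  }
}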
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutionOptions.java b/azkaban-common/src/main/java/azkaban/executor/ExecutionOptions.java
index d8b10f1..ad47ff8 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutionOptions.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutionOptions.java
@@ -16,6 +16,8 @@
 
 package azkaban.executor;
 
+import azkaban.executor.mail.DefaultMailCreator;
+import azkaban.utils.TypedMapWrapper;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -23,13 +25,11 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import azkaban.executor.mail.DefaultMailCreator;
-import azkaban.utils.TypedMapWrapper;
-
 /**
  * Execution options for submitted flows and scheduled flows
  */
 public class ExecutionOptions {
+
   public static final String CONCURRENT_OPTION_SKIP = "skip";
   public static final String CONCURRENT_OPTION_PIPELINE = "pipeline";
   public static final String CONCURRENT_OPTION_IGNORE = "ignore";
@@ -58,8 +58,8 @@ public class ExecutionOptions {
   private boolean notifyOnLastFailure = false;
   private boolean failureEmailsOverride = false;
   private boolean successEmailsOverride = false;
-  private ArrayList<String> failureEmails = new ArrayList<String>();
-  private ArrayList<String> successEmails = new ArrayList<String>();
+  private ArrayList<String> failureEmails = new ArrayList<>();
+  private ArrayList<String> successEmails = new ArrayList<>();
 
   private Integer pipelineLevel = null;
   private Integer pipelineExecId = null;
@@ -67,208 +67,205 @@ public class ExecutionOptions {
   private String concurrentOption = CONCURRENT_OPTION_IGNORE;
   private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
   private boolean memoryCheck = true;
-  private Map<String, String> flowParameters = new HashMap<String, String>();
+  private Map<String, String> flowParameters = new HashMap<>();
+  private FailureAction failureAction = FailureAction.FINISH_CURRENTLY_RUNNING;
+  private List<Object> initiallyDisabledJobs = new ArrayList<>();
 
-  public enum FailureAction {
-    FINISH_CURRENTLY_RUNNING, CANCEL_ALL, FINISH_ALL_POSSIBLE
-  }
+  public static ExecutionOptions createFromObject(final Object obj) {
+    if (obj == null || !(obj instanceof Map)) {
+      return null;
+    }
 
-  private FailureAction failureAction = FailureAction.FINISH_CURRENTLY_RUNNING;
+    final Map<String, Object> optionsMap = (Map<String, Object>) obj;
+    final TypedMapWrapper<String, Object> wrapper =
+        new TypedMapWrapper<>(optionsMap);
+
+    final ExecutionOptions options = new ExecutionOptions();
+    if (optionsMap.containsKey(FLOW_PARAMETERS)) {
+      options.flowParameters = new HashMap<>();
+      options.flowParameters.putAll(wrapper
+          .<String, String>getMap(FLOW_PARAMETERS));
+    }
+    // Failure notification
+    options.notifyOnFirstFailure =
+        wrapper.getBool(NOTIFY_ON_FIRST_FAILURE, options.notifyOnFirstFailure);
+    options.notifyOnLastFailure =
+        wrapper.getBool(NOTIFY_ON_LAST_FAILURE, options.notifyOnLastFailure);
+    options.concurrentOption =
+        wrapper.getString(CONCURRENT_OPTION, options.concurrentOption);
+
+    if (wrapper.containsKey(DISABLE)) {
+      options.initiallyDisabledJobs = wrapper.<Object>getList(DISABLE);
+    }
+
+    if (optionsMap.containsKey(MAIL_CREATOR)) {
+      options.mailCreator = (String) optionsMap.get(MAIL_CREATOR);
+    }
+
+    // Failure action
+    options.failureAction =
+        FailureAction.valueOf(wrapper.getString(FAILURE_ACTION,
+            options.failureAction.toString()));
+    options.pipelineLevel =
+        wrapper.getInt(PIPELINE_LEVEL, options.pipelineLevel);
+    options.pipelineExecId =
+        wrapper.getInt(PIPELINE_EXECID, options.pipelineExecId);
+    options.queueLevel = wrapper.getInt(QUEUE_LEVEL, options.queueLevel);
+
+    // Success emails
+    options.setSuccessEmails(wrapper.<String>getList(SUCCESS_EMAILS,
+        Collections.<String>emptyList()));
+    options.setFailureEmails(wrapper.<String>getList(FAILURE_EMAILS,
+        Collections.<String>emptyList()));
+
+    options.setSuccessEmailsOverridden(wrapper.getBool(SUCCESS_EMAILS_OVERRIDE,
+        false));
+    options.setFailureEmailsOverridden(wrapper.getBool(FAILURE_EMAILS_OVERRIDE,
+        false));
 
-  private List<Object> initiallyDisabledJobs = new ArrayList<Object>();
+    options.setMemoryCheck(wrapper.getBool(MEMORY_CHECK, true));
 
-  public void addAllFlowParameters(Map<String, String> flowParam) {
-    flowParameters.putAll(flowParam);
+    return options;
   }
 
-  public Map<String, String> getFlowParameters() {
-    return flowParameters;
+  public void addAllFlowParameters(final Map<String, String> flowParam) {
+    this.flowParameters.putAll(flowParam);
   }
 
-  public void setFailureEmails(Collection<String> emails) {
-    failureEmails = new ArrayList<String>(emails);
+  public Map<String, String> getFlowParameters() {
+    return this.flowParameters;
   }
 
   public boolean isFailureEmailsOverridden() {
     return this.failureEmailsOverride;
   }
 
+  public void setFailureEmailsOverridden(final boolean override) {
+    this.failureEmailsOverride = override;
+  }
+
   public boolean isSuccessEmailsOverridden() {
     return this.successEmailsOverride;
   }
 
-  public void setSuccessEmailsOverridden(boolean override) {
+  public void setSuccessEmailsOverridden(final boolean override) {
     this.successEmailsOverride = override;
   }
 
-  public void setFailureEmailsOverridden(boolean override) {
-    this.failureEmailsOverride = override;
-  }
-
   public List<String> getFailureEmails() {
-    return failureEmails;
+    return this.failureEmails;
   }
 
-  public void setSuccessEmails(Collection<String> emails) {
-    successEmails = new ArrayList<String>(emails);
+  public void setFailureEmails(final Collection<String> emails) {
+    this.failureEmails = new ArrayList<>(emails);
   }
 
   public List<String> getSuccessEmails() {
-    return successEmails;
+    return this.successEmails;
   }
 
-  public boolean getNotifyOnFirstFailure() {
-    return notifyOnFirstFailure;
+  public void setSuccessEmails(final Collection<String> emails) {
+    this.successEmails = new ArrayList<>(emails);
   }
 
-  public boolean getNotifyOnLastFailure() {
-    return notifyOnLastFailure;
+  public boolean getNotifyOnFirstFailure() {
+    return this.notifyOnFirstFailure;
   }
 
-  public void setNotifyOnFirstFailure(boolean notify) {
+  public void setNotifyOnFirstFailure(final boolean notify) {
     this.notifyOnFirstFailure = notify;
   }
 
-  public void setNotifyOnLastFailure(boolean notify) {
+  public boolean getNotifyOnLastFailure() {
+    return this.notifyOnLastFailure;
+  }
+
+  public void setNotifyOnLastFailure(final boolean notify) {
     this.notifyOnLastFailure = notify;
   }
 
   public FailureAction getFailureAction() {
-    return failureAction;
+    return this.failureAction;
   }
 
-  public void setFailureAction(FailureAction action) {
-    failureAction = action;
+  public void setFailureAction(final FailureAction action) {
+    this.failureAction = action;
   }
 
-  public void setConcurrentOption(String concurrentOption) {
-    this.concurrentOption = concurrentOption;
+  public String getConcurrentOption() {
+    return this.concurrentOption;
   }
 
-  public void setMailCreator(String mailCreator) {
-    this.mailCreator = mailCreator;
+  public void setConcurrentOption(final String concurrentOption) {
+    this.concurrentOption = concurrentOption;
   }
 
-  public String getConcurrentOption() {
-    return concurrentOption;
+  public String getMailCreator() {
+    return this.mailCreator;
   }
 
-  public String getMailCreator() {
-    return mailCreator;
+  public void setMailCreator(final String mailCreator) {
+    this.mailCreator = mailCreator;
   }
 
   public Integer getPipelineLevel() {
-    return pipelineLevel;
+    return this.pipelineLevel;
   }
 
-  public Integer getPipelineExecutionId() {
-    return pipelineExecId;
+  public void setPipelineLevel(final Integer level) {
+    this.pipelineLevel = level;
   }
 
-  public void setPipelineLevel(Integer level) {
-    pipelineLevel = level;
+  public Integer getPipelineExecutionId() {
+    return this.pipelineExecId;
   }
 
-  public void setPipelineExecutionId(Integer id) {
+  public void setPipelineExecutionId(final Integer id) {
     this.pipelineExecId = id;
   }
 
   public Integer getQueueLevel() {
-    return queueLevel;
+    return this.queueLevel;
   }
 
   public List<Object> getDisabledJobs() {
-    return new ArrayList<Object>(initiallyDisabledJobs);
+    return new ArrayList<>(this.initiallyDisabledJobs);
   }
 
-  public void setDisabledJobs(List<Object> disabledJobs) {
-    initiallyDisabledJobs = disabledJobs;
+  public void setDisabledJobs(final List<Object> disabledJobs) {
+    this.initiallyDisabledJobs = disabledJobs;
   }
 
   public boolean getMemoryCheck() {
-    return memoryCheck;
+    return this.memoryCheck;
   }
 
-  public void setMemoryCheck(boolean memoryCheck) {
+  public void setMemoryCheck(final boolean memoryCheck) {
     this.memoryCheck = memoryCheck;
   }
 
   public Map<String, Object> toObject() {
-    HashMap<String, Object> flowOptionObj = new HashMap<String, Object>();
+    final HashMap<String, Object> flowOptionObj = new HashMap<>();
 
     flowOptionObj.put(FLOW_PARAMETERS, this.flowParameters);
     flowOptionObj.put(NOTIFY_ON_FIRST_FAILURE, this.notifyOnFirstFailure);
     flowOptionObj.put(NOTIFY_ON_LAST_FAILURE, this.notifyOnLastFailure);
-    flowOptionObj.put(SUCCESS_EMAILS, successEmails);
-    flowOptionObj.put(FAILURE_EMAILS, failureEmails);
-    flowOptionObj.put(FAILURE_ACTION, failureAction.toString());
-    flowOptionObj.put(PIPELINE_LEVEL, pipelineLevel);
-    flowOptionObj.put(PIPELINE_EXECID, pipelineExecId);
-    flowOptionObj.put(QUEUE_LEVEL, queueLevel);
-    flowOptionObj.put(CONCURRENT_OPTION, concurrentOption);
-    flowOptionObj.put(DISABLE, initiallyDisabledJobs);
-    flowOptionObj.put(FAILURE_EMAILS_OVERRIDE, failureEmailsOverride);
-    flowOptionObj.put(SUCCESS_EMAILS_OVERRIDE, successEmailsOverride);
-    flowOptionObj.put(MAIL_CREATOR, mailCreator);
-    flowOptionObj.put(MEMORY_CHECK, memoryCheck);
+    flowOptionObj.put(SUCCESS_EMAILS, this.successEmails);
+    flowOptionObj.put(FAILURE_EMAILS, this.failureEmails);
+    flowOptionObj.put(FAILURE_ACTION, this.failureAction.toString());
+    flowOptionObj.put(PIPELINE_LEVEL, this.pipelineLevel);
+    flowOptionObj.put(PIPELINE_EXECID, this.pipelineExecId);
+    flowOptionObj.put(QUEUE_LEVEL, this.queueLevel);
+    flowOptionObj.put(CONCURRENT_OPTION, this.concurrentOption);
+    flowOptionObj.put(DISABLE, this.initiallyDisabledJobs);
+    flowOptionObj.put(FAILURE_EMAILS_OVERRIDE, this.failureEmailsOverride);
+    flowOptionObj.put(SUCCESS_EMAILS_OVERRIDE, this.successEmailsOverride);
+    flowOptionObj.put(MAIL_CREATOR, this.mailCreator);
+    flowOptionObj.put(MEMORY_CHECK, this.memoryCheck);
     return flowOptionObj;
   }
 
-  @SuppressWarnings("unchecked")
-  public static ExecutionOptions createFromObject(Object obj) {
-    if (obj == null || !(obj instanceof Map)) {
-      return null;
-    }
-
-    Map<String, Object> optionsMap = (Map<String, Object>) obj;
-    TypedMapWrapper<String, Object> wrapper =
-        new TypedMapWrapper<String, Object>(optionsMap);
-
-    ExecutionOptions options = new ExecutionOptions();
-    if (optionsMap.containsKey(FLOW_PARAMETERS)) {
-      options.flowParameters = new HashMap<String, String>();
-      options.flowParameters.putAll(wrapper
-          .<String, String> getMap(FLOW_PARAMETERS));
-    }
-    // Failure notification
-    options.notifyOnFirstFailure =
-        wrapper.getBool(NOTIFY_ON_FIRST_FAILURE, options.notifyOnFirstFailure);
-    options.notifyOnLastFailure =
-        wrapper.getBool(NOTIFY_ON_LAST_FAILURE, options.notifyOnLastFailure);
-    options.concurrentOption =
-        wrapper.getString(CONCURRENT_OPTION, options.concurrentOption);
-
-    if (wrapper.containsKey(DISABLE)) {
-      options.initiallyDisabledJobs = wrapper.<Object> getList(DISABLE);
-    }
-
-    if (optionsMap.containsKey(MAIL_CREATOR)) {
-      options.mailCreator = (String) optionsMap.get(MAIL_CREATOR);
-    }
-
-    // Failure action
-    options.failureAction =
-        FailureAction.valueOf(wrapper.getString(FAILURE_ACTION,
-            options.failureAction.toString()));
-    options.pipelineLevel =
-        wrapper.getInt(PIPELINE_LEVEL, options.pipelineLevel);
-    options.pipelineExecId =
-        wrapper.getInt(PIPELINE_EXECID, options.pipelineExecId);
-    options.queueLevel = wrapper.getInt(QUEUE_LEVEL, options.queueLevel);
-
-    // Success emails
-    options.setSuccessEmails(wrapper.<String> getList(SUCCESS_EMAILS,
-        Collections.<String> emptyList()));
-    options.setFailureEmails(wrapper.<String> getList(FAILURE_EMAILS,
-        Collections.<String> emptyList()));
-
-    options.setSuccessEmailsOverridden(wrapper.getBool(SUCCESS_EMAILS_OVERRIDE,
-        false));
-    options.setFailureEmailsOverridden(wrapper.getBool(FAILURE_EMAILS_OVERRIDE,
-        false));
-
-    options.setMemoryCheck(wrapper.getBool(MEMORY_CHECK, true));
-
-    return options;
+  public enum FailureAction {
+    FINISH_CURRENTLY_RUNNING, CANCEL_ALL, FINISH_ALL_POSSIBLE
   }
 }
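
A minimal sketch of the ExecutionOptions round trip through the relocated createFromObject() factory (not part of this diff; the email address and parameter values are illustrative):

import azkaban.executor.ExecutionOptions;
import azkaban.executor.ExecutionOptions.FailureAction;
import java.util.Arrays;
import java.util.Map;

public class OptionsRoundTripSketch {
  static void demo() {
    final ExecutionOptions options = new ExecutionOptions();
    options.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_PIPELINE);
    options.setFailureAction(FailureAction.CANCEL_ALL);
    options.setNotifyOnFirstFailure(true);
    options.setFailureEmails(Arrays.asList("oncall@example.com"));

    // Serialize to a map and restore it through the static factory.
    final Map<String, Object> asMap = options.toObject();
    final ExecutionOptions restored = ExecutionOptions.createFromObject(asMap);

    restored.getConcurrentOption();     // "pipeline"
    restored.getFailureAction();        // FailureAction.CANCEL_ALL
    restored.getNotifyOnFirstFailure(); // true
  }
}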
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutionReference.java b/azkaban-common/src/main/java/azkaban/executor/ExecutionReference.java
index 9d93476..c0bf5c3 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutionReference.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutionReference.java
@@ -17,6 +17,7 @@
 package azkaban.executor;
 
 public class ExecutionReference {
+
   private final int execId;
   private Executor executor;
   private long updateTime;
@@ -24,60 +25,60 @@ public class ExecutionReference {
   private int numErrors = 0;
 
 
-  public ExecutionReference(int execId) {
+  public ExecutionReference(final int execId) {
     this.execId = execId;
   }
 
-  public ExecutionReference(int execId, Executor executor) {
+  public ExecutionReference(final int execId, final Executor executor) {
     if (executor == null) {
       throw new IllegalArgumentException(String.format(
-        "Executor cannot be null for exec id: %d ExecutionReference", execId));
+          "Executor cannot be null for exec id: %d ExecutionReference", execId));
     }
     this.execId = execId;
     this.executor = executor;
   }
 
-  public void setUpdateTime(long updateTime) {
-    this.updateTime = updateTime;
+  public long getUpdateTime() {
+    return this.updateTime;
   }
 
-  public void setNextCheckTime(long nextCheckTime) {
-    this.nextCheckTime = nextCheckTime;
+  public void setUpdateTime(final long updateTime) {
+    this.updateTime = updateTime;
   }
 
-  public long getUpdateTime() {
-    return updateTime;
+  public long getNextCheckTime() {
+    return this.nextCheckTime;
   }
 
-  public long getNextCheckTime() {
-    return nextCheckTime;
+  public void setNextCheckTime(final long nextCheckTime) {
+    this.nextCheckTime = nextCheckTime;
   }
 
   public int getExecId() {
-    return execId;
+    return this.execId;
   }
 
   public String getHost() {
-    return executor.getHost();
+    return this.executor.getHost();
   }
 
   public int getPort() {
-    return executor.getPort();
+    return this.executor.getPort();
   }
 
   public int getNumErrors() {
-    return numErrors;
+    return this.numErrors;
   }
 
-  public void setNumErrors(int numErrors) {
+  public void setNumErrors(final int numErrors) {
     this.numErrors = numErrors;
   }
 
-  public void setExecutor(Executor executor) {
-    this.executor = executor;
+  public Executor getExecutor() {
+    return this.executor;
   }
 
-  public Executor getExecutor() {
-    return executor;
+  public void setExecutor(final Executor executor) {
+    this.executor = executor;
   }
 }
\ No newline at end of file
diff --git a/azkaban-common/src/main/java/azkaban/executor/Executor.java b/azkaban-common/src/main/java/azkaban/executor/Executor.java
index f0600ab..04a3434 100644
--- a/azkaban-common/src/main/java/azkaban/executor/Executor.java
+++ b/azkaban-common/src/main/java/azkaban/executor/Executor.java
@@ -16,8 +16,8 @@
 
 package azkaban.executor;
 
-import java.util.Date;
 import azkaban.utils.Utils;
+import java.util.Date;
 
 /**
  * Class to represent an AzkabanExecutorServer details for ExecutorManager
@@ -25,6 +25,7 @@ import azkaban.utils.Utils;
  * @author gaggarwa
  */
 public class Executor implements Comparable<Executor> {
+
   private final int id;
   private final String host;
   private final int port;
@@ -39,15 +40,11 @@ public class Executor implements Comparable<Executor> {
   * Note: port should be within the unsigned 2-byte
   * integer range
    * </pre>
-   *
-   * @param executor_id
-   * @param executor_host
-   * @param executor_port
    */
-  public Executor(int id, String host, int port, boolean isActive) {
+  public Executor(final int id, final String host, final int port, final boolean isActive) {
     if (!Utils.isValidPort(port)) {
       throw new IllegalArgumentException(String.format(
-        "Invalid port number %d for host %s, executor_id %d", port, host, id));
+          "Invalid port number %d for host %s, executor_id %d", port, host, id));
     }
 
     this.id = id;
@@ -60,83 +57,92 @@ public class Executor implements Comparable<Executor> {
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + (isActive ? 1231 : 1237);
-    result = prime * result + ((host == null) ? 0 : host.hashCode());
-    result = prime * result + id;
-    result = prime * result + port;
+    result = prime * result + (this.isActive ? 1231 : 1237);
+    result = prime * result + ((this.host == null) ? 0 : this.host.hashCode());
+    result = prime * result + this.id;
+    result = prime * result + this.port;
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (!(obj instanceof Executor))
+    }
+    if (!(obj instanceof Executor)) {
       return false;
-    Executor other = (Executor) obj;
-    if (isActive != other.isActive)
+    }
+    final Executor other = (Executor) obj;
+    if (this.isActive != other.isActive) {
       return false;
-    if (host == null) {
-      if (other.host != null)
+    }
+    if (this.host == null) {
+      if (other.host != null) {
         return false;
-    } else if (!host.equals(other.host))
+      }
+    } else if (!this.host.equals(other.host)) {
       return false;
-    if (id != other.id)
+    }
+    if (this.id != other.id) {
       return false;
-    if (port != other.port)
+    }
+    if (this.port != other.port) {
       return false;
+    }
     return true;
   }
 
   @Override
-  public String toString(){
+  public String toString() {
     return String.format("%s:%s (id: %s)",
-      null == this.host || this.host.length() == 0 ? "(empty)" : this.host,
-      this.port, this.id);
+        null == this.host || this.host.length() == 0 ? "(empty)" : this.host,
+        this.port, this.id);
   }
 
   public String getHost() {
-    return host;
+    return this.host;
   }
 
   public int getPort() {
-    return port;
+    return this.port;
   }
 
   public boolean isActive() {
-    return isActive;
+    return this.isActive;
+  }
+
+  public void setActive(final boolean isActive) {
+    this.isActive = isActive;
   }
 
   public int getId() {
-    return id;
+    return this.id;
   }
 
   public ExecutorInfo getExecutorInfo() {
     return this.cachedExecutorStats;
   }
 
-  public void setExecutorInfo(ExecutorInfo info) {
+  public void setExecutorInfo(final ExecutorInfo info) {
     this.cachedExecutorStats = info;
     this.lastStatsUpdatedTime = new Date();
   }
 
   /**
    * Gets the timestamp when the executor info is last updated.
-   * @return date object represents the timestamp, null if the executor info of this
-   *         specific executor is never refreshed.
-   * */
-  public Date getLastStatsUpdatedTime(){
+   *
+   * @return date object representing the timestamp, or null if the executor info of this
+   * specific executor has never been refreshed.
+   */
+  public Date getLastStatsUpdatedTime() {
     return this.lastStatsUpdatedTime;
   }
 
-  public void setActive(boolean isActive) {
-    this.isActive = isActive;
-  }
-
   @Override
-  public int compareTo(Executor o) {
+  public int compareTo(final Executor o) {
     return null == o ? 1 : this.hashCode() - o.hashCode();
   }
 }
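
A minimal sketch wiring an Executor into an ExecutionReference (not part of this diff; host, port, and ids are illustrative):

import azkaban.executor.ExecutionReference;
import azkaban.executor.Executor;

public class ReferenceSketch {
  static void demo() {
    // The Executor constructor rejects invalid ports; the two-argument
    // ExecutionReference constructor rejects a null executor.
    final Executor executor = new Executor(1, "executor-01.example.com", 12321, true);
    final ExecutionReference ref = new ExecutionReference(1001, executor);

    ref.getHost(); // delegates to executor.getHost()
    ref.getPort(); // delegates to executor.getPort()
    ref.setUpdateTime(System.currentTimeMillis());
  }
}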
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorApiClient.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorApiClient.java
index 2ab814d..cd3d598 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorApiClient.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorApiClient.java
@@ -16,47 +16,54 @@
 
 package azkaban.executor;
 
+import azkaban.utils.RestfulApiClient;
 import java.io.IOException;
 import org.apache.http.HttpResponse;
 import org.apache.http.StatusLine;
 import org.apache.http.client.HttpResponseException;
 import org.apache.http.util.EntityUtils;
-import azkaban.utils.RestfulApiClient;
 
-/** Client class that will be used to handle all Restful API calls between Executor and the host application.
- * */
+/**
+ * Client class that will be used to handle all Restful API calls between Executor and the host
+ * application.
+ */
 public class ExecutorApiClient extends RestfulApiClient<String> {
+
   private static ExecutorApiClient instance = null;
-  private ExecutorApiClient(){}
+
+  private ExecutorApiClient() {
+  }
 
   /**
    * Singleton method to return the instance of the current object.
-   * */
-  public static ExecutorApiClient getInstance(){
-    if (null == instance){
+   */
+  public static ExecutorApiClient getInstance() {
+    if (null == instance) {
       instance = new ExecutorApiClient();
     }
 
     return instance;
   }
 
-  /**Implementing the parseResponse function to return de-serialized Json object.
-   * @param response  the returned response from the HttpClient.
+  /**
+   * Implementing the parseResponse function to return de-serialized Json object.
+   *
+   * @param response the returned response from the HttpClient.
    * @return de-serialized object from Json or null if the response doesn't have a body.
-   * */
+   */
   @Override
-  protected String parseResponse(HttpResponse response)
+  protected String parseResponse(final HttpResponse response)
       throws HttpResponseException, IOException {
     final StatusLine statusLine = response.getStatusLine();
-    String responseBody = response.getEntity() != null ?
+    final String responseBody = response.getEntity() != null ?
         EntityUtils.toString(response.getEntity()) : "";
 
     if (statusLine.getStatusCode() >= 300) {
 
-        logger.error(String.format("unable to parse response as the response status is %s",
-            statusLine.getStatusCode()));
+      logger.error(String.format("unable to parse response as the response status is %s",
+          statusLine.getStatusCode()));
 
-        throw new HttpResponseException(statusLine.getStatusCode(),responseBody);
+      throw new HttpResponseException(statusLine.getStatusCode(), responseBody);
     }
 
     return responseBody;
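For reference, a minimal sketch of how parseResponse behaves, built only from the Apache HttpComponents classes already on the classpath. The sketch class name is hypothetical, and it is placed in the azkaban.executor package purely so it can reach the protected parseResponse method.

package azkaban.executor;

import java.nio.charset.StandardCharsets;
import org.apache.http.HttpVersion;
import org.apache.http.client.HttpResponseException;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;

public class ExecutorApiClientSketch {

  public static void main(final String[] args) throws Exception {
    // A 2xx response: parseResponse simply returns the body as a String.
    final BasicHttpResponse ok =
        new BasicHttpResponse(new BasicStatusLine(HttpVersion.HTTP_1_1, 200, "OK"));
    ok.setEntity(new StringEntity("{\"status\":\"success\"}", StandardCharsets.UTF_8));
    System.out.println(ExecutorApiClient.getInstance().parseResponse(ok));

    // A >= 300 response: the body is wrapped in an HttpResponseException instead.
    final BasicHttpResponse error =
        new BasicHttpResponse(new BasicStatusLine(HttpVersion.HTTP_1_1, 500, "Server Error"));
    error.setEntity(new StringEntity("something went wrong", StandardCharsets.UTF_8));
    try {
      ExecutorApiClient.getInstance().parseResponse(error);
    } catch (final HttpResponseException e) {
      System.out.println(e.getStatusCode() + ": " + e.getMessage());
    }
  }
}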
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorInfo.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorInfo.java
index 980cd8e..250c43a 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorInfo.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorInfo.java
@@ -17,121 +17,121 @@
 package azkaban.executor;
 
 import java.io.IOException;
-
-import org.codehaus.jackson.JsonParseException;
-import org.codehaus.jackson.map.JsonMappingException;
 import org.codehaus.jackson.map.ObjectMapper;
 
- /** Class that exposes the statistics from the executor server.
-  *  List of the statistics -
-  *  remainingMemoryPercent;
-  *  remainingMemory;
-  *  remainingFlowCapacity;
-  *  numberOfAssignedFlows;
-  *  lastDispatchedTime;
-  *  cpuUsage;
-  *
-  * */
-  public class ExecutorInfo implements java.io.Serializable{
-    private static final long serialVersionUID = 3009746603773371263L;
-    private double remainingMemoryPercent;
-    private long   remainingMemoryInMB;
-    private int    remainingFlowCapacity;
-    private int    numberOfAssignedFlows;
-    private long   lastDispatchedTime;
-    private double cpuUsage;
-
-    public double getCpuUsage() {
-      return this.cpuUsage;
-    }
-
-    public void setCpuUpsage(double value){
-      this.cpuUsage = value;
-    }
-
-    public double getRemainingMemoryPercent() {
-      return this.remainingMemoryPercent;
-    }
-
-    public void setRemainingMemoryPercent(double value){
-      this.remainingMemoryPercent = value;
-    }
-
-    public long getRemainingMemoryInMB(){
-      return this.remainingMemoryInMB;
-    }
-
-    public void setRemainingMemoryInMB(long value){
-      this.remainingMemoryInMB = value;
-    }
-
-    public int getRemainingFlowCapacity(){
-      return this.remainingFlowCapacity;
-    }
-
-    public void setRemainingFlowCapacity(int value){
-      this.remainingFlowCapacity = value;
-    }
-
-    public long getLastDispatchedTime(){
-      return this.lastDispatchedTime;
-    }
-
-    public void setLastDispatchedTime(long value){
-      this.lastDispatchedTime = value;
-    }
-
-    public int getNumberOfAssignedFlows () {
-      return this.numberOfAssignedFlows;
-    }
-
-    public void setNumberOfAssignedFlows (int value) {
-      this.numberOfAssignedFlows = value;
-    }
-
-    public ExecutorInfo(){}
-
-    public ExecutorInfo (double remainingMemoryPercent,
-        long remainingMemory,
-        int remainingFlowCapacity,
-        long lastDispatched,
-        double cpuUsage,
-        int numberOfAssignedFlows){
-      this.remainingMemoryInMB = remainingMemory;
-      this.cpuUsage = cpuUsage;
-      this.remainingFlowCapacity = remainingFlowCapacity;
-      this.remainingMemoryPercent = remainingMemoryPercent;
-      this.lastDispatchedTime = lastDispatched;
-      this.numberOfAssignedFlows = numberOfAssignedFlows;
-    }
-
-    @Override
-    public boolean equals(Object obj)
-    {
-        if (obj instanceof ExecutorInfo)
-        {
-          boolean result = true;
-          ExecutorInfo stat = (ExecutorInfo) obj;
-
-          result &=this.remainingMemoryInMB == stat.remainingMemoryInMB;
-          result &=this.cpuUsage == stat.cpuUsage;
-          result &=this.remainingFlowCapacity == stat.remainingFlowCapacity;
-          result &=this.remainingMemoryPercent == stat.remainingMemoryPercent;
-          result &=this.numberOfAssignedFlows == stat.numberOfAssignedFlows;
-          result &= this.lastDispatchedTime == stat.lastDispatchedTime;
-          return result;
-        }
-        return false;
+/**
+ * Class that exposes the statistics from the executor server.
+ * List of the statistics -
+ * remainingMemoryPercent;
+ * remainingMemory;
+ * remainingFlowCapacity;
+ * numberOfAssignedFlows;
+ * lastDispatchedTime;
+ * cpuUsage;
+ */
+public class ExecutorInfo implements java.io.Serializable {
+
+  private static final long serialVersionUID = 3009746603773371263L;
+  private double remainingMemoryPercent;
+  private long remainingMemoryInMB;
+  private int remainingFlowCapacity;
+  private int numberOfAssignedFlows;
+  private long lastDispatchedTime;
+  private double cpuUsage;
+
+  public ExecutorInfo() {
+  }
+
+  public ExecutorInfo(final double remainingMemoryPercent,
+      final long remainingMemory,
+      final int remainingFlowCapacity,
+      final long lastDispatched,
+      final double cpuUsage,
+      final int numberOfAssignedFlows) {
+    this.remainingMemoryInMB = remainingMemory;
+    this.cpuUsage = cpuUsage;
+    this.remainingFlowCapacity = remainingFlowCapacity;
+    this.remainingMemoryPercent = remainingMemoryPercent;
+    this.lastDispatchedTime = lastDispatched;
+    this.numberOfAssignedFlows = numberOfAssignedFlows;
+  }
+
+  /**
+   * Helper function to get an ExecutorInfo instance from the JSON string serialized from another
+   * object.
+   *
+   * @param jsonString the JSON string to de-serialize from.
+   * @return an instance of the object if the parsing is successful, null otherwise.
+   */
+  public static ExecutorInfo fromJSONString(final String jsonString) throws IOException {
+    if (null == jsonString || jsonString.length() == 0) {
+      return null;
     }
-
-    /**
-     * Helper function to get an ExecutorInfo instance from the JSon String serialized from another object.
-     * @param  jsonString the string that will be de-serialized from.
-     * @return instance of the object if the parsing is successful, null other wise.
-     * @throws JsonParseException,JsonMappingException,IOException
-     * */
-    public static ExecutorInfo fromJSONString(String jsonString) throws IOException{
-      if (null == jsonString || jsonString.length() == 0) return null;
-      return new ObjectMapper().readValue(jsonString, ExecutorInfo.class);
+    return new ObjectMapper().readValue(jsonString, ExecutorInfo.class);
+  }
+
+  public double getCpuUsage() {
+    return this.cpuUsage;
+  }
+
+  public void setCpuUpsage(final double value) {
+    this.cpuUsage = value;
+  }
+
+  public double getRemainingMemoryPercent() {
+    return this.remainingMemoryPercent;
+  }
+
+  public void setRemainingMemoryPercent(final double value) {
+    this.remainingMemoryPercent = value;
+  }
+
+  public long getRemainingMemoryInMB() {
+    return this.remainingMemoryInMB;
+  }
+
+  public void setRemainingMemoryInMB(final long value) {
+    this.remainingMemoryInMB = value;
+  }
+
+  public int getRemainingFlowCapacity() {
+    return this.remainingFlowCapacity;
+  }
+
+  public void setRemainingFlowCapacity(final int value) {
+    this.remainingFlowCapacity = value;
+  }
+
+  public long getLastDispatchedTime() {
+    return this.lastDispatchedTime;
+  }
+
+  public void setLastDispatchedTime(final long value) {
+    this.lastDispatchedTime = value;
+  }
+
+  public int getNumberOfAssignedFlows() {
+    return this.numberOfAssignedFlows;
+  }
+
+  public void setNumberOfAssignedFlows(final int value) {
+    this.numberOfAssignedFlows = value;
+  }
+
+  @Override
+  public boolean equals(final Object obj) {
+    if (obj instanceof ExecutorInfo) {
+      boolean result = true;
+      final ExecutorInfo stat = (ExecutorInfo) obj;
+
+      result &= this.remainingMemoryInMB == stat.remainingMemoryInMB;
+      result &= this.cpuUsage == stat.cpuUsage;
+      result &= this.remainingFlowCapacity == stat.remainingFlowCapacity;
+      result &= this.remainingMemoryPercent == stat.remainingMemoryPercent;
+      result &= this.numberOfAssignedFlows == stat.numberOfAssignedFlows;
+      result &= this.lastDispatchedTime == stat.lastDispatchedTime;
+      return result;
     }
+    return false;
+  }
 }
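A small, hedged sketch of the fromJSONString helper above. The JSON keys and values are made-up sample data; cpuUsage is deliberately left out because the class only exposes a setter named setCpuUpsage, so property matching for that one field is not guaranteed.

import azkaban.executor.ExecutorInfo;
import java.io.IOException;

public class ExecutorInfoSketch {

  public static void main(final String[] args) throws IOException {
    // Hypothetical sample payload; only fields with conventional setters are included.
    final String json = "{\"remainingMemoryPercent\":80.5,"
        + "\"remainingMemoryInMB\":2048,"
        + "\"remainingFlowCapacity\":10,"
        + "\"numberOfAssignedFlows\":2,"
        + "\"lastDispatchedTime\":0}";

    final ExecutorInfo info = ExecutorInfo.fromJSONString(json);
    System.out.println(info.getRemainingMemoryInMB() + " MB free, "
        + info.getRemainingFlowCapacity() + " flow slots left");

    // Null or empty input is handled explicitly and yields null instead of an exception.
    System.out.println(ExecutorInfo.fromJSONString("") == null);
  }
}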
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorLogEvent.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorLogEvent.java
index 31526a3..cd0d38c 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorLogEvent.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorLogEvent.java
@@ -24,45 +24,6 @@ import java.util.Date;
  * @author gaggarwa
  */
 public class ExecutorLogEvent {
-  /**
-   * Log event type messages. Do not change the numeric representation of each
-   * enum. Only represent from 0 to 255 different codes.
-   */
-  public enum EventType {
-    ERROR(128), HOST_UPDATE(1), PORT_UPDATE(2), ACTIVATION(3), INACTIVATION(4),
-    CREATED(5);
-
-    private final int numVal;
-
-    EventType(int numVal) {
-      this.numVal = numVal;
-    }
-
-    public int getNumVal() {
-      return numVal;
-    }
-
-    public static EventType fromInteger(int x)
-        throws IllegalArgumentException {
-      switch (x) {
-      case 1:
-        return HOST_UPDATE;
-      case 2:
-        return PORT_UPDATE;
-      case 3:
-        return ACTIVATION;
-      case 4:
-        return INACTIVATION;
-      case 5:
-        return CREATED;
-      case 128:
-        return ERROR;
-      default:
-        throw new IllegalArgumentException(String.format(
-          "inalid status code %d", x));
-      }
-    }
-  }
 
   private final int executorId;
   private final String user;
@@ -70,8 +31,8 @@ public class ExecutorLogEvent {
   private final EventType type;
   private final String message;
 
-  public ExecutorLogEvent(int executorId, String user, Date time,
-    EventType type, String message) {
+  public ExecutorLogEvent(final int executorId, final String user, final Date time,
+      final EventType type, final String message) {
     this.executorId = executorId;
     this.user = user;
     this.time = time;
@@ -80,22 +41,62 @@ public class ExecutorLogEvent {
   }
 
   public int getExecutorId() {
-    return executorId;
+    return this.executorId;
   }
 
   public String getUser() {
-    return user;
+    return this.user;
   }
 
   public Date getTime() {
-    return time;
+    return this.time;
   }
 
   public EventType getType() {
-    return type;
+    return this.type;
   }
 
   public String getMessage() {
-    return message;
+    return this.message;
+  }
+
+  /**
+   * Log event type messages. Do not change the numeric representation of each
+   * enum. Only represent from 0 to 255 different codes.
+   */
+  public enum EventType {
+    ERROR(128), HOST_UPDATE(1), PORT_UPDATE(2), ACTIVATION(3), INACTIVATION(4),
+    CREATED(5);
+
+    private final int numVal;
+
+    EventType(final int numVal) {
+      this.numVal = numVal;
+    }
+
+    public static EventType fromInteger(final int x)
+        throws IllegalArgumentException {
+      switch (x) {
+        case 1:
+          return HOST_UPDATE;
+        case 2:
+          return PORT_UPDATE;
+        case 3:
+          return ACTIVATION;
+        case 4:
+          return INACTIVATION;
+        case 5:
+          return CREATED;
+        case 128:
+          return ERROR;
+        default:
+          throw new IllegalArgumentException(String.format(
+              "inalid status code %d", x));
+      }
+    }
+
+    public int getNumVal() {
+      return this.numVal;
+    }
   }
 }
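A tiny sketch of the numeric round trip the EventType enum is designed for (the codes are persisted, which is why the javadoc forbids changing them). The class name and the sample codes below are illustrative only.

import azkaban.executor.ExecutorLogEvent.EventType;

public class EventTypeSketch {

  public static void main(final String[] args) {
    // Going from a stored numeric code back to the enum constant and back again.
    final EventType type = EventType.fromInteger(3);
    System.out.println(type + " -> " + type.getNumVal()); // prints ACTIVATION -> 3

    // Codes outside the defined set are rejected rather than mapped to a default.
    try {
      EventType.fromInteger(42);
    } catch (final IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }
  }
}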
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerAdapter.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
index c50b0bc..fec3bd3 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerAdapter.java
@@ -16,6 +16,10 @@
 
 package azkaban.executor;
 
+import azkaban.project.Project;
+import azkaban.utils.FileIOUtils.JobMetaData;
+import azkaban.utils.FileIOUtils.LogData;
+import azkaban.utils.Pair;
 import java.io.IOException;
 import java.lang.Thread.State;
 import java.util.Collection;
@@ -23,11 +27,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import azkaban.project.Project;
-import azkaban.utils.FileIOUtils.JobMetaData;
-import azkaban.utils.FileIOUtils.LogData;
-import azkaban.utils.Pair;
-
 public interface ExecutorManagerAdapter {
 
   public static final String LOCAL_MODE = "local";
@@ -94,12 +93,9 @@ public interface ExecutorManagerAdapter {
    * Returns All running with executors and queued flows
    * Note, returns empty list if there isn't any running or queued flows
    * </pre>
-   *
-   * @return
-   * @throws IOException
    */
   public List<Pair<ExecutableFlow, Executor>> getActiveFlowsWithExecutor()
-    throws IOException;
+      throws IOException;
 
   public List<ExecutableFlow> getRecentlyFinishedFlows();
 
@@ -190,10 +186,9 @@ public interface ExecutorManagerAdapter {
    * <li>{@link azkaban.executor.ConnectorParams#STATS_SET_ENABLEMETRICS}<li>
    * <li>{@link azkaban.executor.ConnectorParams#STATS_SET_DISABLEMETRICS}<li>
    * </ul>
-   * @throws ExecutorManagerException
    */
   public Map<String, Object> callExecutorStats(int executorId, String action,
-    Pair<String, String>... param) throws IOException, ExecutorManagerException;
+      Pair<String, String>... param) throws IOException, ExecutorManagerException;
 
   public Map<String, Object> callExecutorJMX(String hostPort, String action,
       String mBean) throws IOException;
@@ -213,8 +208,6 @@ public interface ExecutorManagerAdapter {
   /**
    * Returns a collection of all the active executors maintained by active
    * executors
-   *
-   * @return
    */
   public Collection<Executor> getAllActiveExecutors();
 
@@ -225,9 +218,6 @@ public interface ExecutorManagerAdapter {
    * 1. throws an Exception in case of a SQL issue
    * 2. return null when no executor is found with the given executorId
    * </pre>
-   *
-   * @throws ExecutorManagerException
-   *
    */
   public Executor fetchExecutor(int executorId) throws ExecutorManagerException;
 
@@ -242,22 +232,16 @@ public interface ExecutorManagerAdapter {
    * 3. In local mode, If a local executor is specified and it is marked inactive in db,
    *    this method will convert local executor as active in DB
    * </pre>
-   *
-   * @throws ExecutorManagerException
    */
-   public void setupExecutors() throws ExecutorManagerException;
-
-   /**
-    * Enable flow dispatching in QueueProcessor
-    *
-    * @throws ExecutorManagerException
-    */
-   public void enableQueueProcessorThread() throws ExecutorManagerException;
-
-   /**
-    * Disable flow dispatching in QueueProcessor
-    *
-    * @throws ExecutorManagerException
-    */
-   public void disableQueueProcessorThread() throws ExecutorManagerException;
+  public void setupExecutors() throws ExecutorManagerException;
+
+  /**
+   * Enable flow dispatching in QueueProcessor
+   */
+  public void enableQueueProcessorThread() throws ExecutorManagerException;
+
+  /**
+   * Disable flow dispatching in QueueProcessor
+   */
+  public void disableQueueProcessorThread() throws ExecutorManagerException;
 }
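As a hedged illustration of how setupExecutors and the queue-processor methods above are meant to be combined, here is a hypothetical maintenance helper. Only the interface methods come from this file; the class name and the way the adapter instance is obtained are assumptions.

import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;

public class DispatchMaintenanceSketch {

  // Pauses dispatching, refreshes the executor set, then resumes dispatching.
  public static void refreshExecutors(final ExecutorManagerAdapter manager)
      throws ExecutorManagerException {
    manager.disableQueueProcessorThread();   // stop handing queued flows to executors
    try {
      manager.setupExecutors();              // re-read active executors (multi- or local mode)
    } finally {
      manager.enableQueueProcessorThread();  // resume dispatching even if setup failed
    }
  }
}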
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerException.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerException.java
index a02073c..1ed4e25 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerException.java
@@ -17,41 +17,42 @@
 package azkaban.executor;
 
 public class ExecutorManagerException extends Exception {
-  public enum Reason {
-    SkippedExecution
-  }
 
   private static final long serialVersionUID = 1L;
   private ExecutableFlow flow = null;
   private Reason reason = null;
 
-  public ExecutorManagerException(Exception e) {
+  public ExecutorManagerException(final Exception e) {
     super(e);
   }
 
-  public ExecutorManagerException(String message) {
+  public ExecutorManagerException(final String message) {
     super(message);
   }
 
-  public ExecutorManagerException(String message, ExecutableFlow flow) {
+  public ExecutorManagerException(final String message, final ExecutableFlow flow) {
     super(message);
     this.flow = flow;
   }
 
-  public ExecutorManagerException(String message, Reason reason) {
+  public ExecutorManagerException(final String message, final Reason reason) {
     super(message);
     this.reason = reason;
   }
 
-  public ExecutorManagerException(String message, Throwable cause) {
+  public ExecutorManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
   public ExecutableFlow getExecutableFlow() {
-    return flow;
+    return this.flow;
   }
 
   public Reason getReason() {
-    return reason;
+    return this.reason;
+  }
+
+  public enum Reason {
+    SkippedExecution
   }
 }
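A short sketch of how the Reason field lets callers tell an intentionally skipped execution apart from a hard failure; the handling code itself is hypothetical.

import azkaban.executor.ExecutorManagerException;
import azkaban.executor.ExecutorManagerException.Reason;

public class ReasonSketch {

  public static void main(final String[] args) {
    try {
      throw new ExecutorManagerException("execution skipped", Reason.SkippedExecution);
    } catch (final ExecutorManagerException e) {
      // A skipped execution can be logged and dropped instead of being treated as an error.
      if (e.getReason() == Reason.SkippedExecution) {
        System.out.println("skipped, not failed: " + e.getMessage());
      }
    }
  }
}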
diff --git a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerServlet.java b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerServlet.java
index ba93304..e9c87c6 100644
--- a/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerServlet.java
+++ b/azkaban-common/src/main/java/azkaban/executor/ExecutorManagerServlet.java
@@ -16,50 +16,47 @@
 
 package azkaban.executor;
 
+import azkaban.server.AbstractServiceServlet;
+import azkaban.utils.FileIOUtils.LogData;
+import azkaban.utils.JSONUtils;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Map;
-
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.FileIOUtils.LogData;
-import azkaban.utils.JSONUtils;
-import azkaban.server.AbstractServiceServlet;
-
 public class ExecutorManagerServlet extends AbstractServiceServlet {
-  private final ExecutorManagerAdapter executorManager;
 
   public static final String URL = "executorManager";
   private static final long serialVersionUID = 1L;
   private static final Logger logger = Logger
       .getLogger(ExecutorManagerServlet.class);
+  private final ExecutorManagerAdapter executorManager;
 
-  public ExecutorManagerServlet(ExecutorManagerAdapter executorManager) {
+  public ExecutorManagerServlet(final ExecutorManagerAdapter executorManager) {
     this.executorManager = executorManager;
   }
 
   @Override
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+  public void doGet(final HttpServletRequest req, final HttpServletResponse resp)
       throws ServletException, IOException {
-    HashMap<String, Object> respMap = new HashMap<String, Object>();
+    final HashMap<String, Object> respMap = new HashMap<>();
     try {
       if (!hasParam(req, ExecutorManagerAdapter.INFO_ACTION)) {
         logger.error("Parameter action not set");
         respMap.put("error", "Parameter action not set");
       } else {
-        String action = getParam(req, ExecutorManagerAdapter.INFO_ACTION);
+        final String action = getParam(req, ExecutorManagerAdapter.INFO_ACTION);
         if (action.equals(ExecutorManagerAdapter.ACTION_UPDATE)) {
           handleAjaxUpdateRequest(req, respMap);
         } else {
-          int execid =
+          final int execid =
               Integer.parseInt(getParam(req,
                   ExecutorManagerAdapter.INFO_EXEC_ID));
-          String user =
+          final String user =
               getParam(req, ExecutorManagerAdapter.INFO_USER_ID, null);
 
           logger.info("User " + user + " has called action " + action + " on "
@@ -89,7 +86,7 @@ public class ExecutorManagerServlet extends AbstractServiceServlet {
           }
         }
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e.getMessage());
     }
@@ -97,8 +94,8 @@ public class ExecutorManagerServlet extends AbstractServiceServlet {
     resp.flushBuffer();
   }
 
-  private void handleModifyExecution(HashMap<String, Object> respMap,
-      int execid, String user, HttpServletRequest req) {
+  private void handleModifyExecution(final HashMap<String, Object> respMap,
+      final int execid, final String user, final HttpServletRequest req) {
     if (!hasParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND)) {
       respMap.put(ExecutorManagerAdapter.INFO_ERROR,
           "Modification command not set.");
@@ -106,121 +103,120 @@ public class ExecutorManagerServlet extends AbstractServiceServlet {
     }
 
     try {
-      String modificationType =
+      final String modificationType =
           getParam(req, ExecutorManagerAdapter.INFO_MODIFY_COMMAND);
-      ExecutableFlow exflow = executorManager.getExecutableFlow(execid);
+      final ExecutableFlow exflow = this.executorManager.getExecutableFlow(execid);
       if (ExecutorManagerAdapter.COMMAND_MODIFY_RETRY_FAILURES
           .equals(modificationType)) {
-        executorManager.retryFailures(exflow, user);
+        this.executorManager.retryFailures(exflow, user);
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
   }
 
-  private void handleAjaxResumeFlow(HashMap<String, Object> respMap,
-      int execid, String user) {
+  private void handleAjaxResumeFlow(final HashMap<String, Object> respMap,
+      final int execid, final String user) {
     try {
-      ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
-      executorManager.resumeFlow(exFlow, user);
-    } catch (Exception e) {
+      final ExecutableFlow exFlow = this.executorManager.getExecutableFlow(execid);
+      this.executorManager.resumeFlow(exFlow, user);
+    } catch (final Exception e) {
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
 
   }
 
-  private void handleAjaxPauseFlow(HashMap<String, Object> respMap, int execid,
-      String user) {
+  private void handleAjaxPauseFlow(final HashMap<String, Object> respMap, final int execid,
+      final String user) {
     try {
-      ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
-      executorManager.pauseFlow(exFlow, user);
-    } catch (Exception e) {
+      final ExecutableFlow exFlow = this.executorManager.getExecutableFlow(execid);
+      this.executorManager.pauseFlow(exFlow, user);
+    } catch (final Exception e) {
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
   }
 
-  private void handleAjaxCancelFlow(HashMap<String, Object> respMap,
-      int execid, String user) {
+  private void handleAjaxCancelFlow(final HashMap<String, Object> respMap,
+      final int execid, final String user) {
     try {
-      ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
-      executorManager.cancelFlow(exFlow, user);
-    } catch (Exception e) {
+      final ExecutableFlow exFlow = this.executorManager.getExecutableFlow(execid);
+      this.executorManager.cancelFlow(exFlow, user);
+    } catch (final Exception e) {
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
   }
 
-  private void handleAjaxSubmitFlow(HttpServletRequest req,
-      HashMap<String, Object> respMap, int execid) {
+  private void handleAjaxSubmitFlow(final HttpServletRequest req,
+      final HashMap<String, Object> respMap, final int execid) {
     try {
-      String execFlowJson =
+      final String execFlowJson =
           getParam(req, ExecutorManagerAdapter.INFO_EXEC_FLOW_JSON);
-      ExecutableFlow exflow =
+      final ExecutableFlow exflow =
           ExecutableFlow.createExecutableFlowFromObject(JSONUtils
               .parseJSONFromString(execFlowJson));
-      String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID);
-      executorManager.submitExecutableFlow(exflow, user);
+      final String user = getParam(req, ExecutorManagerAdapter.INFO_USER_ID);
+      this.executorManager.submitExecutableFlow(exflow, user);
       respMap.put(ExecutorManagerAdapter.INFO_EXEC_ID, exflow.getExecutionId());
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
   }
 
-  private void handleFetchJobLogEvent(int execid, HttpServletRequest req,
-      HttpServletResponse resp, HashMap<String, Object> respMap) {
+  private void handleFetchJobLogEvent(final int execid, final HttpServletRequest req,
+      final HttpServletResponse resp, final HashMap<String, Object> respMap) {
     try {
-      ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
-      String jobId = getParam(req, ExecutorManagerAdapter.INFO_JOB_NAME);
-      int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
-      int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
-      int attempt = getIntParam(req, ExecutorManagerAdapter.INFO_ATTEMPT);
-      LogData log =
-          executorManager.getExecutionJobLog(exFlow, jobId, offset, length,
+      final ExecutableFlow exFlow = this.executorManager.getExecutableFlow(execid);
+      final String jobId = getParam(req, ExecutorManagerAdapter.INFO_JOB_NAME);
+      final int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+      final int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+      final int attempt = getIntParam(req, ExecutorManagerAdapter.INFO_ATTEMPT);
+      final LogData log =
+          this.executorManager.getExecutionJobLog(exFlow, jobId, offset, length,
               attempt);
       respMap.put(ExecutorManagerAdapter.INFO_LOG,
           JSONUtils.toJSON(log.toObject()));
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
   }
 
-  private void handleFetchFlowLogEvent(int execid, HttpServletRequest req,
-      HttpServletResponse resp, HashMap<String, Object> respMap) {
+  private void handleFetchFlowLogEvent(final int execid, final HttpServletRequest req,
+      final HttpServletResponse resp, final HashMap<String, Object> respMap) {
     try {
-      ExecutableFlow exFlow = executorManager.getExecutableFlow(execid);
-      int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
-      int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
-      LogData log =
-          executorManager.getExecutableFlowLog(exFlow, offset, length);
+      final ExecutableFlow exFlow = this.executorManager.getExecutableFlow(execid);
+      final int offset = getIntParam(req, ExecutorManagerAdapter.INFO_OFFSET);
+      final int length = getIntParam(req, ExecutorManagerAdapter.INFO_LENGTH);
+      final LogData log =
+          this.executorManager.getExecutableFlowLog(exFlow, offset, length);
       respMap.put(ExecutorManagerAdapter.INFO_LOG,
           JSONUtils.toJSON(log.toObject()));
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
 
   }
 
-  @SuppressWarnings("unchecked")
-  private void handleAjaxUpdateRequest(HttpServletRequest req,
-      HashMap<String, Object> respMap) {
+  private void handleAjaxUpdateRequest(final HttpServletRequest req,
+      final HashMap<String, Object> respMap) {
     try {
-      ArrayList<Object> updateTimesList =
+      final ArrayList<Object> updateTimesList =
           (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
               ExecutorManagerAdapter.INFO_UPDATE_TIME_LIST));
-      ArrayList<Object> execIDList =
+      final ArrayList<Object> execIDList =
           (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
               ExecutorManagerAdapter.INFO_EXEC_ID_LIST));
 
-      ArrayList<Object> updateList = new ArrayList<Object>();
+      final ArrayList<Object> updateList = new ArrayList<>();
       for (int i = 0; i < execIDList.size(); ++i) {
-        long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
-        int execId = (Integer) execIDList.get(i);
+        final long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
+        final int execId = (Integer) execIDList.get(i);
 
-        ExecutableFlow flow = executorManager.getExecutableFlow(execId);
+        final ExecutableFlow flow = this.executorManager.getExecutableFlow(execId);
         if (flow == null) {
-          Map<String, Object> errorResponse = new HashMap<String, Object>();
+          final Map<String, Object> errorResponse = new HashMap<>();
           errorResponse.put(ExecutorManagerAdapter.INFO_ERROR,
               "Flow does not exist");
           errorResponse.put(ExecutorManagerAdapter.INFO_EXEC_ID, execId);
@@ -234,7 +230,7 @@ public class ExecutorManagerServlet extends AbstractServiceServlet {
       }
 
       respMap.put(ExecutorManagerAdapter.INFO_UPDATES, updateList);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
       respMap.put(ExecutorManagerAdapter.INFO_ERROR, e);
     }
diff --git a/azkaban-common/src/main/java/azkaban/executor/mail/DefaultMailCreator.java b/azkaban-common/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
index bedb884..9080a39 100644
--- a/azkaban-common/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/mail/DefaultMailCreator.java
@@ -16,32 +16,36 @@
 
 package azkaban.executor.mail;
 
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutionOptions.FailureAction;
 import azkaban.utils.EmailMessage;
 import azkaban.utils.Emailer;
 import azkaban.utils.Utils;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
 
 public class DefaultMailCreator implements MailCreator {
-  public static final String DEFAULT_MAIL_CREATOR = "default";
-  private static HashMap<String, MailCreator> registeredCreators = new HashMap<>();
-  private static MailCreator defaultCreator;
 
+  public static final String DEFAULT_MAIL_CREATOR = "default";
   private static final DateFormat DATE_FORMATTER = new SimpleDateFormat(
       "yyyy/MM/dd HH:mm:ss z");
+  private static final HashMap<String, MailCreator> registeredCreators = new HashMap<>();
+  private static final MailCreator defaultCreator;
 
-  public static void registerCreator(String name, MailCreator creator) {
+  static {
+    defaultCreator = new DefaultMailCreator();
+    registerCreator(DEFAULT_MAIL_CREATOR, defaultCreator);
+  }
+
+  public static void registerCreator(final String name, final MailCreator creator) {
     registeredCreators.put(name, creator);
   }
 
-  public static MailCreator getCreator(String name) {
+  public static MailCreator getCreator(final String name) {
     MailCreator creator = registeredCreators.get(name);
     if (creator == null) {
       creator = defaultCreator;
@@ -49,19 +53,22 @@ public class DefaultMailCreator implements MailCreator {
     return creator;
   }
 
-  static {
-    defaultCreator = new DefaultMailCreator();
-    registerCreator(DEFAULT_MAIL_CREATOR, defaultCreator);
+  private static String convertMSToString(final long timeInMS) {
+    if (timeInMS < 0) {
+      return "N/A";
+    } else {
+      return DATE_FORMATTER.format(new Date(timeInMS));
+    }
   }
 
   @Override
-  public boolean createFirstErrorMessage(ExecutableFlow flow,
-      EmailMessage message, String azkabanName, String scheme,
-      String clientHostname, String clientPortNumber, String... vars) {
+  public boolean createFirstErrorMessage(final ExecutableFlow flow,
+      final EmailMessage message, final String azkabanName, final String scheme,
+      final String clientHostname, final String clientPortNumber, final String... vars) {
 
-    ExecutionOptions option = flow.getExecutionOptions();
-    List<String> emailList = option.getFailureEmails();
-    int execId = flow.getExecutionId();
+    final ExecutionOptions option = flow.getExecutionOptions();
+    final List<String> emailList = option.getFailureEmails();
+    final int execId = flow.getExecutionId();
 
     if (emailList != null && !emailList.isEmpty()) {
       message.addAllToAddress(emailList);
@@ -95,7 +102,7 @@ public class DefaultMailCreator implements MailCreator {
       message.println("<tr><td>Status</td><td>" + flow.getStatus() + "</td></tr>");
       message.println("</table>");
       message.println("");
-      String executionUrl =
+      final String executionUrl =
           scheme + "://" + clientHostname + ":" + clientPortNumber + "/"
               + "executor?" + "execid=" + execId;
       message.println("<a href=\"" + executionUrl + "\">" + flow.getFlowId()
@@ -103,9 +110,9 @@ public class DefaultMailCreator implements MailCreator {
 
       message.println("");
       message.println("<h3>Reason</h3>");
-      List<String> failedJobs = Emailer.findFailedJobs(flow);
+      final List<String> failedJobs = Emailer.findFailedJobs(flow);
       message.println("<ul>");
-      for (String jobId : failedJobs) {
+      for (final String jobId : failedJobs) {
         message.println("<li><a href=\"" + executionUrl + "&job=" + jobId
             + "\">Failed job '" + jobId + "' Link</a></li>");
       }
@@ -118,14 +125,14 @@ public class DefaultMailCreator implements MailCreator {
   }
 
   @Override
-  public boolean createErrorEmail(ExecutableFlow flow, EmailMessage message,
-      String azkabanName, String scheme, String clientHostname,
-      String clientPortNumber, String... vars) {
+  public boolean createErrorEmail(final ExecutableFlow flow, final EmailMessage message,
+      final String azkabanName, final String scheme, final String clientHostname,
+      final String clientPortNumber, final String... vars) {
 
-    ExecutionOptions option = flow.getExecutionOptions();
+    final ExecutionOptions option = flow.getExecutionOptions();
 
-    List<String> emailList = option.getFailureEmails();
-    int execId = flow.getExecutionId();
+    final List<String> emailList = option.getFailureEmails();
+    final int execId = flow.getExecutionId();
 
     if (emailList != null && !emailList.isEmpty()) {
       message.addAllToAddress(emailList);
@@ -147,7 +154,7 @@ public class DefaultMailCreator implements MailCreator {
       message.println("<tr><td>Status</td><td>" + flow.getStatus() + "</td></tr>");
       message.println("</table>");
       message.println("");
-      String executionUrl =
+      final String executionUrl =
           scheme + "://" + clientHostname + ":" + clientPortNumber + "/"
               + "executor?" + "execid=" + execId;
       message.println("<a href=\"" + executionUrl + "\">" + flow.getFlowId()
@@ -155,13 +162,13 @@ public class DefaultMailCreator implements MailCreator {
 
       message.println("");
       message.println("<h3>Reason</h3>");
-      List<String> failedJobs = Emailer.findFailedJobs(flow);
+      final List<String> failedJobs = Emailer.findFailedJobs(flow);
       message.println("<ul>");
-      for (String jobId : failedJobs) {
+      for (final String jobId : failedJobs) {
         message.println("<li><a href=\"" + executionUrl + "&job=" + jobId
             + "\">Failed job '" + jobId + "' Link</a></li>");
       }
-      for (String reasons : vars) {
+      for (final String reasons : vars) {
         message.println("<li>" + reasons + "</li>");
       }
 
@@ -172,14 +179,14 @@ public class DefaultMailCreator implements MailCreator {
   }
 
   @Override
-  public boolean createSuccessEmail(ExecutableFlow flow, EmailMessage message,
-      String azkabanName, String scheme, String clientHostname,
-      String clientPortNumber, String... vars) {
+  public boolean createSuccessEmail(final ExecutableFlow flow, final EmailMessage message,
+      final String azkabanName, final String scheme, final String clientHostname,
+      final String clientPortNumber, final String... vars) {
 
-    ExecutionOptions option = flow.getExecutionOptions();
-    List<String> emailList = option.getSuccessEmails();
+    final ExecutionOptions option = flow.getExecutionOptions();
+    final List<String> emailList = option.getSuccessEmails();
 
-    int execId = flow.getExecutionId();
+    final int execId = flow.getExecutionId();
 
     if (emailList != null && !emailList.isEmpty()) {
       message.addAllToAddress(emailList);
@@ -201,7 +208,7 @@ public class DefaultMailCreator implements MailCreator {
       message.println("<tr><td>Status</td><td>" + flow.getStatus() + "</td></tr>");
       message.println("</table>");
       message.println("");
-      String executionUrl =
+      final String executionUrl =
           scheme + "://" + clientHostname + ":" + clientPortNumber + "/"
               + "executor?" + "execid=" + execId;
       message.println("<a href=\"" + executionUrl + "\">" + flow.getFlowId()
@@ -210,12 +217,4 @@ public class DefaultMailCreator implements MailCreator {
     }
     return false;
   }
-
-  private static String convertMSToString(long timeInMS) {
-    if (timeInMS < 0) {
-      return "N/A";
-    } else {
-      return DATE_FORMATTER.format(new Date(timeInMS));
-    }
-  }
 }
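A minimal sketch of the creator registry above. The alias string is made up; the point is only that lookups of unknown names fall back to the default creator instead of returning null.

import azkaban.executor.mail.DefaultMailCreator;
import azkaban.executor.mail.MailCreator;

public class MailCreatorRegistrySketch {

  public static void main(final String[] args) {
    // Register an additional creator instance under a hypothetical alias.
    DefaultMailCreator.registerCreator("my-alias", new DefaultMailCreator());

    final MailCreator known = DefaultMailCreator.getCreator("my-alias");
    final MailCreator unknown = DefaultMailCreator.getCreator("no-such-creator");

    System.out.println(known.getClass().getSimpleName());    // DefaultMailCreator
    System.out.println(unknown.getClass().getSimpleName());  // DefaultMailCreator (the fallback)
  }
}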
diff --git a/azkaban-common/src/main/java/azkaban/executor/mail/MailCreator.java b/azkaban-common/src/main/java/azkaban/executor/mail/MailCreator.java
index 13f0185..8df7407 100644
--- a/azkaban-common/src/main/java/azkaban/executor/mail/MailCreator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/mail/MailCreator.java
@@ -20,6 +20,7 @@ import azkaban.executor.ExecutableFlow;
 import azkaban.utils.EmailMessage;
 
 public interface MailCreator {
+
   public boolean createFirstErrorMessage(ExecutableFlow flow,
       EmailMessage message, String azkabanName, String scheme,
       String clientHostname, String clientPortNumber, String... vars);
diff --git a/azkaban-common/src/main/java/azkaban/executor/QueuedExecutions.java b/azkaban-common/src/main/java/azkaban/executor/QueuedExecutions.java
index 641ffae..7eb592d 100644
--- a/azkaban-common/src/main/java/azkaban/executor/QueuedExecutions.java
+++ b/azkaban-common/src/main/java/azkaban/executor/QueuedExecutions.java
@@ -1,15 +1,13 @@
 package azkaban.executor;
 
+import azkaban.utils.Pair;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.PriorityBlockingQueue;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Pair;
-
 /**
  * <pre>
  * Composite data structure to represent non-dispatched flows in webserver.
@@ -17,7 +15,8 @@ import azkaban.utils.Pair;
  * </pre>
  */
 public class QueuedExecutions {
-  private static Logger logger = Logger.getLogger(QueuedExecutions.class);
+
+  private static final Logger logger = Logger.getLogger(QueuedExecutions.class);
   final long capacity;
 
   /* map to easily access queued flows */
@@ -25,40 +24,35 @@ public class QueuedExecutions {
   /* actual queue */
   final private BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queuedFlowList;
 
-  public QueuedExecutions(long capacity) {
+  public QueuedExecutions(final long capacity) {
     this.capacity = capacity;
-    queuedFlowMap =
-      new ConcurrentHashMap<Integer, Pair<ExecutionReference, ExecutableFlow>>();
-    queuedFlowList =
-      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
-        new ExecutableFlowPriorityComparator());
+    this.queuedFlowMap =
+        new ConcurrentHashMap<>();
+    this.queuedFlowList =
+        new PriorityBlockingQueue<>(10,
+            new ExecutableFlowPriorityComparator());
   }
 
   /**
    * Wraps BoundedQueue Take method to have a corresponding update in
    * queuedFlowMap lookup table
-   *
-   * @return
-   * @throws InterruptedException
    */
   public Pair<ExecutionReference, ExecutableFlow> fetchHead()
-    throws InterruptedException {
-    Pair<ExecutionReference, ExecutableFlow> pair = queuedFlowList.take();
+      throws InterruptedException {
+    final Pair<ExecutionReference, ExecutableFlow> pair = this.queuedFlowList.take();
     if (pair != null && pair.getFirst() != null) {
-      queuedFlowMap.remove(pair.getFirst().getExecId());
+      this.queuedFlowMap.remove(pair.getFirst().getExecId());
     }
     return pair;
   }
 
   /**
    * Helper method to have a single point of deletion in the queued flows
-   *
-   * @param executionId
    */
-  public void dequeue(int executionId) {
-    if (queuedFlowMap.containsKey(executionId)) {
-      queuedFlowList.remove(queuedFlowMap.get(executionId));
-      queuedFlowMap.remove(executionId);
+  public void dequeue(final int executionId) {
+    if (this.queuedFlowMap.containsKey(executionId)) {
+      this.queuedFlowList.remove(this.queuedFlowMap.get(executionId));
+      this.queuedFlowMap.remove(executionId);
     }
   }
 
@@ -77,20 +71,20 @@ public class QueuedExecutions {
    *           same execution Id
    * </pre>
    */
-  public void enqueue(ExecutableFlow exflow, ExecutionReference ref)
-    throws ExecutorManagerException {
+  public void enqueue(final ExecutableFlow exflow, final ExecutionReference ref)
+      throws ExecutorManagerException {
     if (hasExecution(exflow.getExecutionId())) {
-      String errMsg = "Flow already in queue " + exflow.getExecutionId();
+      final String errMsg = "Flow already in queue " + exflow.getExecutionId();
       throw new ExecutorManagerException(errMsg);
     }
 
-    Pair<ExecutionReference, ExecutableFlow> pair =
-      new Pair<ExecutionReference, ExecutableFlow>(ref, exflow);
+    final Pair<ExecutionReference, ExecutableFlow> pair =
+        new Pair<>(ref, exflow);
     try {
-      queuedFlowMap.put(exflow.getExecutionId(), pair);
-      queuedFlowList.put(pair);
-    } catch (InterruptedException e) {
-      String errMsg = "Failed to insert flow " + exflow.getExecutionId();
+      this.queuedFlowMap.put(exflow.getExecutionId(), pair);
+      this.queuedFlowList.put(pair);
+    } catch (final InterruptedException e) {
+      final String errMsg = "Failed to insert flow " + exflow.getExecutionId();
       logger.error(errMsg, e);
       throw new ExecutorManagerException(errMsg);
     }
@@ -110,41 +104,33 @@ public class QueuedExecutions {
    * </pre>
    */
   public void enqueueAll(
-    Collection<Pair<ExecutionReference, ExecutableFlow>> collection)
-    throws ExecutorManagerException {
-    for (Pair<ExecutionReference, ExecutableFlow> pair : collection) {
+      final Collection<Pair<ExecutionReference, ExecutableFlow>> collection)
+      throws ExecutorManagerException {
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : collection) {
       enqueue(pair.getSecond(), pair.getFirst());
     }
   }
 
   /**
    * Returns a read only collection of all the queued (flows, reference) pairs
-   *
-   * @return
    */
   public Collection<Pair<ExecutionReference, ExecutableFlow>> getAllEntries() {
-    return Collections.unmodifiableCollection(queuedFlowMap.values());
+    return Collections.unmodifiableCollection(this.queuedFlowMap.values());
   }
 
   /**
    * Checks if an execution is queued or not
-   *
-   * @param executionId
-   * @return
    */
-  public boolean hasExecution(int executionId) {
-    return queuedFlowMap.containsKey(executionId);
+  public boolean hasExecution(final int executionId) {
+    return this.queuedFlowMap.containsKey(executionId);
   }
 
   /**
    * Fetch flow for an execution. Returns null, if execution not in queue
-   *
-   * @param executionId
-   * @return
    */
-  public ExecutableFlow getFlow(int executionId) {
+  public ExecutableFlow getFlow(final int executionId) {
     if (hasExecution(executionId)) {
-      return queuedFlowMap.get(executionId).getSecond();
+      return this.queuedFlowMap.get(executionId).getSecond();
     }
     return null;
   }
@@ -152,49 +138,40 @@ public class QueuedExecutions {
   /**
    * Fetch Activereference for an execution. Returns null, if execution not in
    * queue
-   *
-   * @param executionId
-   * @return
    */
-  public ExecutionReference getReference(int executionId) {
+  public ExecutionReference getReference(final int executionId) {
     if (hasExecution(executionId)) {
-      return queuedFlowMap.get(executionId).getFirst();
+      return this.queuedFlowMap.get(executionId).getFirst();
     }
     return null;
   }
 
   /**
    * Size of the queue
-   *
-   * @return
    */
   public long size() {
-    return queuedFlowList.size();
+    return this.queuedFlowList.size();
   }
 
   /**
    * Verify, if queue is full as per initialized capacity
-   *
-   * @return
    */
   public boolean isFull() {
-    return size() >= capacity;
+    return size() >= this.capacity;
   }
 
   /**
    * Verify, if queue is empty or not
-   *
-   * @return
    */
   public boolean isEmpty() {
-    return queuedFlowList.isEmpty() && queuedFlowMap.isEmpty();
+    return this.queuedFlowList.isEmpty() && this.queuedFlowMap.isEmpty();
   }
 
   /**
    * Empties queue by dequeuing all the elements
    */
   public void clear() {
-    for (Pair<ExecutionReference, ExecutableFlow> pair : queuedFlowMap.values()) {
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : this.queuedFlowMap.values()) {
       dequeue(pair.getFirst().getExecId());
     }
   }
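A hedged sketch of the queue's read-side contract. Constructing real ExecutionReference/ExecutableFlow pairs is out of scope here, so only the capacity and lookup behaviour is exercised, with the enqueue semantics summarized in comments.

import azkaban.executor.QueuedExecutions;

public class QueuedExecutionsSketch {

  public static void main(final String[] args) {
    // Capacity of two queued flows; the value is arbitrary for this sketch.
    final QueuedExecutions queue = new QueuedExecutions(2);

    System.out.println(queue.isEmpty());         // true, nothing queued yet
    System.out.println(queue.isFull());          // false, below capacity
    System.out.println(queue.hasExecution(123)); // false, unknown execution id
    System.out.println(queue.getFlow(123));      // null for the same reason

    // enqueue(exflow, ref) adds the pair to both the lookup map and the priority
    // queue, and throws ExecutorManagerException if the execution id is already
    // queued; dequeue(execId) and fetchHead() keep the two structures in sync.
  }
}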
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateComparator.java b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateComparator.java
index cf4145e..faf509a 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateComparator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateComparator.java
@@ -16,71 +16,81 @@
 
 package azkaban.executor.selector;
 
+import azkaban.utils.Pair;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Pair;
-
 /**
  * <pre>
  *  Abstract class for a candidate comparator.
- *  this class contains implementation of most of the core logics. Implementing classes is expected only to
+ *  this class contains implementation of most of the core logics. Implementing classes is expected
+ * only to
  *  register factor comparators using the provided register function.
  * <pre>
  */
 public abstract class CandidateComparator<T> implements Comparator<T> {
+
   protected static Logger logger = Logger.getLogger(CandidateComparator.class);
 
   // internal repository of the registered comparators .
-  private Map<String,FactorComparator<T>> factorComparatorList =
-      new ConcurrentHashMap<String,FactorComparator<T>>();
+  private final Map<String, FactorComparator<T>> factorComparatorList =
+      new ConcurrentHashMap<>();
 
-  /** gets the name of the current implementation of the candidate comparator.
+  /**
+   * gets the name of the current implementation of the candidate comparator.
+   *
    * @returns : name of the comparator.
-   * */
+   */
   public abstract String getName();
 
-  /** tieBreak method which will kick in when the comparator list generated an equality result for
-   *  both sides. the tieBreak method will try best to make sure a stable result is returned.
-   * */
-  protected boolean tieBreak(T object1, T object2){
-    if (null == object2) return true;
-    if (null == object1) return false;
+  /**
+   * tieBreak method which kicks in when the comparator list generates an equal result for
+   * both sides. The tieBreak method tries its best to make sure a stable result is returned.
+   */
+  protected boolean tieBreak(final T object1, final T object2) {
+    if (null == object2) {
+      return true;
+    }
+    if (null == object1) {
+      return false;
+    }
     return object1.hashCode() >= object2.hashCode();
   }
 
-  /** function to register a factorComparator to the internal Map for future reference.
+  /**
+   * function to register a factorComparator to the internal Map for future reference.
+   *
    * @param factorComparator : the comparator object to be registered.
-   * @throws IllegalArgumentException
-   * */
-  protected void registerFactorComparator(FactorComparator<T> comparator){
-      if (null == comparator ||
-          Integer.MAX_VALUE - this.getTotalWeight() < comparator.getWeight() ) {
-        throw new IllegalArgumentException("unable to register comparator."+
+   */
+  protected void registerFactorComparator(final FactorComparator<T> comparator) {
+    if (null == comparator ||
+        Integer.MAX_VALUE - this.getTotalWeight() < comparator.getWeight()) {
+      throw new IllegalArgumentException("unable to register comparator." +
           " The passed comparator is null or has an invalid weight value.");
-      }
+    }
 
-      // add or replace the Comparator.
-      this.factorComparatorList.put(comparator.getFactorName(),comparator);
-      logger.debug(String.format("Factor comparator added for '%s'. Weight = '%s'",
-          comparator.getFactorName(), comparator.getWeight()));
+    // add or replace the Comparator.
+    this.factorComparatorList.put(comparator.getFactorName(), comparator);
+    logger.debug(String.format("Factor comparator added for '%s'. Weight = '%s'",
+        comparator.getFactorName(), comparator.getWeight()));
   }
 
-  /** function returns the total weight of the registered comparators.
+  /**
+   * function returns the total weight of the registered comparators.
+   *
    * @return the value of total weight.
-   * */
-  public int getTotalWeight(){
-    int totalWeight = 0 ;
+   */
+  public int getTotalWeight() {
+    int totalWeight = 0;
 
     // save out a copy of the values as HashMap.values() takes o(n) to return the value.
-    Collection<FactorComparator<T>> allValues = this.factorComparatorList.values();
-    for (FactorComparator<T> item : allValues){
-      if (item != null){
+    final Collection<FactorComparator<T>> allValues = this.factorComparatorList.values();
+    for (final FactorComparator<T> item : allValues) {
+      if (item != null) {
         totalWeight += item.getWeight();
       }
     }
@@ -94,66 +104,74 @@ public abstract class CandidateComparator<T> implements Comparator<T> {
    *  the comparison follows the following logic -
    *  1. if both objects are equal return 0 score for both.
    *  2. if one side is null, the other side gets all the score.
-   *  3. if both sides are non-null value, both values will be passed to all the registered FactorComparators
-   *     each factor comparator will generate a result based off it sole logic the weight of the comparator will be
+   *  3. if both sides are non-null, both values will be passed to all the registered
+   * FactorComparators;
+   *     each factor comparator will generate a result based on its own logic, and the weight of
+   * the comparator will be
    *     added to the wining side, if equal, no value will be added to either side.
    *  4. final result will be returned in a Pair container.
    *
    * </pre>
-   * @param object1  the first  object (left side)  to be compared.
-   * @param object2  the second object (right side) to be compared.
+   *
+   * @param object1 the first  object (left side)  to be compared.
+   * @param object2 the second object (right side) to be compared.
    * @return a pair structure contains the score for both sides.
-   * */
-  public Pair<Integer,Integer> getComparisonScore(T object1, T object2){
+   */
+  public Pair<Integer, Integer> getComparisonScore(final T object1, final T object2) {
     logger.debug(String.format("start comparing '%s' with '%s',  total weight = %s ",
         object1 == null ? "(null)" : object1.toString(),
         object2 == null ? "(null)" : object2.toString(),
         this.getTotalWeight()));
 
-    int result1 = 0 ;
-    int result2 = 0 ;
+    int result1 = 0;
+    int result2 = 0;
 
     // short cut if object equals.
-    if (object1 ==  object2){
+    if (object1 == object2) {
       logger.debug("[Comparator] same object.");
     } else
-    // left side is null.
-    if (object1 == null){
-      logger.debug("[Comparator] left side is null, right side gets total weight.");
-      result2 = this.getTotalWeight();
-    } else
-    // right side is null.
-    if (object2 == null){
-      logger.debug("[Comparator] right side is null, left side gets total weight.");
-      result1 = this.getTotalWeight();
-    } else
-    // both side is not null,put them thru the full loop
-    {
-      Collection<FactorComparator<T>> comparatorList = this.factorComparatorList.values();
-      for (FactorComparator<T> comparator :comparatorList){
-        int result = comparator.compare(object1, object2);
-        result1  = result1 + (result > 0 ? comparator.getWeight() : 0);
-        result2  = result2 + (result < 0 ? comparator.getWeight() : 0);
-        logger.debug(String.format("[Factor: %s] compare result : %s (current score %s vs %s)",
-            comparator.getFactorName(), result, result1, result2));
-      }
-    }
+      // left side is null.
+      if (object1 == null) {
+        logger.debug("[Comparator] left side is null, right side gets total weight.");
+        result2 = this.getTotalWeight();
+      } else
+        // right side is null.
+        if (object2 == null) {
+          logger.debug("[Comparator] right side is null, left side gets total weight.");
+          result1 = this.getTotalWeight();
+        } else
+        // both side is not null,put them thru the full loop
+        {
+          final Collection<FactorComparator<T>> comparatorList = this.factorComparatorList.values();
+          for (final FactorComparator<T> comparator : comparatorList) {
+            final int result = comparator.compare(object1, object2);
+            result1 = result1 + (result > 0 ? comparator.getWeight() : 0);
+            result2 = result2 + (result < 0 ? comparator.getWeight() : 0);
+            logger.debug(String.format("[Factor: %s] compare result : %s (current score %s vs %s)",
+                comparator.getFactorName(), result, result1, result2));
+          }
+        }
     // in case of same score, use tie-breaker to stabilize the result.
-    if (result1 == result2){
-      boolean result = this.tieBreak(object1, object2);
+    if (result1 == result2) {
+      final boolean result = this.tieBreak(object1, object2);
       logger.debug("[TieBreaker] TieBreaker chose " +
-      (result? String.format("left side (%s)",  null== object1 ? "null": object1.toString()) :
-               String.format("right side (%s)", null== object2 ? "null": object2.toString()) ));
-      if (result) result1++; else result2++;
+          (result ? String.format("left side (%s)", null == object1 ? "null" : object1.toString()) :
+              String.format("right side (%s)", null == object2 ? "null" : object2.toString())));
+      if (result) {
+        result1++;
+      } else {
+        result2++;
+      }
     }
 
-    logger.debug(String.format("Result : %s vs %s ",result1,result2));
-    return new Pair<Integer,Integer>(result1,result2);
+    logger.debug(String.format("Result : %s vs %s ", result1, result2));
+    return new Pair<>(result1, result2);
   }
 
   @Override
-  public int compare(T o1, T o2) {
-    Pair<Integer,Integer> result = this.getComparisonScore(o1,o2);
-    return Objects.equals(result.getFirst(), result.getSecond()) ? 0 : result.getFirst() > result.getSecond() ? 1 : -1;
+  public int compare(final T o1, final T o2) {
+    final Pair<Integer, Integer> result = this.getComparisonScore(o1, o2);
+    return Objects.equals(result.getFirst(), result.getSecond()) ? 0
+        : result.getFirst() > result.getSecond() ? 1 : -1;
   }
 }
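To make the scoring contract concrete, here is a deliberately empty subclass (a sketch, not how production comparators are built): with no factor comparators registered the total weight is zero, so every comparison is decided by the hashCode-based tie-breaker.

import azkaban.executor.selector.CandidateComparator;
import azkaban.utils.Pair;

public class NoOpCandidateComparator extends CandidateComparator<String> {

  @Override
  public String getName() {
    return "NoOpCandidateComparator";
  }

  public static void main(final String[] args) {
    final NoOpCandidateComparator comparator = new NoOpCandidateComparator();

    // Both sides score 0 from the (empty) factor list, then the tie-breaker
    // awards one extra point to one side, so this prints "1 vs 0" or "0 vs 1".
    final Pair<Integer, Integer> score = comparator.getComparisonScore("left", "right");
    System.out.println(score.getFirst() + " vs " + score.getSecond());

    // Consequently compare() never returns 0 for two distinct non-null objects.
    System.out.println(comparator.compare("left", "right"));
  }
}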
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateFilter.java b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateFilter.java
index f927a2a..0c7b70c 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateFilter.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateFilter.java
@@ -19,64 +19,72 @@ package azkaban.executor.selector;
 import java.util.Collection;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
-
 import org.apache.log4j.Logger;
 
 
-/** Abstract class for a candidate filter.
- *  this class contains implementation of most of the core logics. Implementing classes is expected only to
- *  register filters using the provided register function.
+/**
+ * Abstract class for a candidate filter. This class contains the implementation of most of the
+ * core logic. Implementing classes are expected only to register filters using the provided
+ * register function.
  */
-public abstract class CandidateFilter<T,V>  {
+public abstract class CandidateFilter<T, V> {
+
   protected static Logger logger = Logger.getLogger(CandidateFilter.class);
 
   // internal repository of the registered filters .
-  private Map<String,FactorFilter<T,V>> factorFilterList =
-      new ConcurrentHashMap<String,FactorFilter<T,V>>();
+  private final Map<String, FactorFilter<T, V>> factorFilterList =
+      new ConcurrentHashMap<>();
 
-  /** gets the name of the current implementation of the candidate filter.
+  /**
+   * gets the name of the current implementation of the candidate filter.
+   *
    * @return : name of the filter.
-   * */
+   */
   public abstract String getName();
 
-  /** function to register a factorFilter to the internal Map for future reference.
+  /**
+   * function to register a factorFilter to the internal Map for future reference.
+   *
    * @param factorfilter : the Filter object to be registered.
-   * @throws IllegalArgumentException
-   * */
-  protected void registerFactorFilter(FactorFilter<T,V> filter){
-      if (null == filter ) {
-        throw new IllegalArgumentException("unable to register factor filter. " +
-                  "The passed comaractor is null or has an invalid weight value.");
-      }
+   */
+  protected void registerFactorFilter(final FactorFilter<T, V> filter) {
+    if (null == filter) {
+      throw new IllegalArgumentException("unable to register factor filter. " +
+          "The passed comaractor is null or has an invalid weight value.");
+    }
 
-      // add or replace the filter.
-      this.factorFilterList.put(filter.getFactorName(),filter);
-      logger.debug(String.format("Factor filter added for '%s'.",
-          filter.getFactorName()));
+    // add or replace the filter.
+    this.factorFilterList.put(filter.getFactorName(), filter);
+    logger.debug(String.format("Factor filter added for '%s'.",
+        filter.getFactorName()));
   }
 
-  /** function to analyze the target item according to the reference object to decide whether the item should be filtered.
-   * @param filteringTarget:   object to be checked.
-   * @param referencingObject: object which contains statistics based on which a decision is made whether
-   *                      the object being checked need to be filtered or not.
-   * @return true if the check passed, false if check failed, which means the item need to be filtered.
-   * */
-  public boolean filterTarget(T filteringTarget, V referencingObject){
+  /**
+   * function to analyze the target item according to the reference object to decide whether the
+   * item should be filtered.
+   *
+   * @param filteringTarget: object to be checked.
+   * @param referencingObject: object which contains statistics based on which a decision is made
+   * whether the object being checked needs to be filtered or not.
+   * @return true if the check passed, false if the check failed, which means the item needs to be
+   * filtered.
+   */
+  public boolean filterTarget(final T filteringTarget, final V referencingObject) {
     logger.debug(String.format("start filtering '%s' with factor filter for '%s'",
         filteringTarget == null ? "(null)" : filteringTarget.toString(),
         this.getName()));
 
-    Collection<FactorFilter<T,V>> filterList = this.factorFilterList.values();
+    final Collection<FactorFilter<T, V>> filterList = this.factorFilterList.values();
     boolean result = true;
-    for (FactorFilter<T,V> filter : filterList){
-      result &= filter.filterTarget(filteringTarget,referencingObject);
+    for (final FactorFilter<T, V> filter : filterList) {
+      result &= filter.filterTarget(filteringTarget, referencingObject);
       logger.debug(String.format("[Factor: %s] filter result : %s ",
           filter.getFactorName(), result));
-      if (!result){
+      if (!result) {
         break;
       }
     }
-    logger.debug(String.format("Final filtering result : %s ",result));
+    logger.debug(String.format("Final filtering result : %s ", result));
     return result;
   }
 }
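
As the class comment says, implementing classes only register filters. A hypothetical subclass (the class name and the filtering rule are invented for illustration) would look like this:

// Illustrative sketch only: a toy CandidateFilter that keeps non-empty string candidates.
import azkaban.executor.selector.CandidateFilter;
import azkaban.executor.selector.FactorFilter;

public class NonEmptyStringFilter extends CandidateFilter<String, Object> {

  public NonEmptyStringFilter() {
    // filterTarget() returns true when the candidate passes, false when it should be dropped.
    registerFactorFilter(FactorFilter.create("NonEmpty",
        (target, reference) -> target != null && !target.isEmpty()));
  }

  @Override
  public String getName() {
    return "NonEmptyStringFilter";
  }
}
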
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateSelector.java b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateSelector.java
index 8fa91d0..f8ad1d9 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/CandidateSelector.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/CandidateSelector.java
@@ -21,67 +21,74 @@ import java.util.Collection;
 import java.util.Collections;
 import org.apache.log4j.Logger;
 
-/** Implementation of the CandidateSelector.
- *  @param K executor object type.
- *  @param V dispatching object type.
- * */
+/**
+ * Implementation of the CandidateSelector.
+ *
+ * @param K executor object type.
+ * @param V dispatching object type.
+ */
 public class CandidateSelector<K extends Comparable<K>, V> implements Selector<K, V> {
-  private static Logger logger = Logger.getLogger(CandidateComparator.class);
 
-  private CandidateFilter<K,V> filter;
-  private CandidateComparator<K> comparator;
+  private static final Logger logger = Logger.getLogger(CandidateComparator.class);
+
+  private final CandidateFilter<K, V> filter;
+  private final CandidateComparator<K> comparator;
 
-  /**constructor of the class.
+  /**
+   * constructor of the class.
+   *
    * @param filter CandidateFilter object to be used to perform the candidate filtering.
-   * @param comparator CandidateComparator object to be used to find the best suit candidate from the filtered list.
-   * */
-  public CandidateSelector(CandidateFilter<K,V> filter,
-      CandidateComparator<K> comparator){
+   * @param comparator CandidateComparator object to be used to find the best-suited candidate from
+   * the filtered list.
+   */
+  public CandidateSelector(final CandidateFilter<K, V> filter,
+      final CandidateComparator<K> comparator) {
     this.filter = filter;
     this.comparator = comparator;
   }
 
   @Override
-  public K getBest(Collection<K> candidateList, V dispatchingObject) {
+  public K getBest(final Collection<K> candidateList, final V dispatchingObject) {
 
-     // shortcut if the candidateList is empty.
-     if ( null == candidateList || candidateList.size() == 0){
-       logger.error("failed to getNext candidate as the passed candidateList is null or empty.");
-       return null;
-     }
+    // shortcut if the candidateList is empty.
+    if (null == candidateList || candidateList.size() == 0) {
+      logger.error("failed to getNext candidate as the passed candidateList is null or empty.");
+      return null;
+    }
 
-     logger.debug("start candidate selection logic.");
-     logger.debug(String.format("candidate count before filtering: %s", candidateList.size()));
+    logger.debug("start candidate selection logic.");
+    logger.debug(String.format("candidate count before filtering: %s", candidateList.size()));
 
-     // to keep the input untouched, we will form up a new list based off the filtering result.
-     Collection<K> filteredList = new ArrayList<K>();
+    // to keep the input untouched, we will form up a new list based off the filtering result.
+    Collection<K> filteredList = new ArrayList<>();
 
-     if (null != this.filter){
-       for (K candidateInfo : candidateList){
-         if (filter.filterTarget(candidateInfo,dispatchingObject)){
-           filteredList.add(candidateInfo);
-         }
-       }
-     } else{
-       filteredList = candidateList;
-       logger.debug("skipping the candidate filtering as the filter object is not specifed.");
-     }
+    if (null != this.filter) {
+      for (final K candidateInfo : candidateList) {
+        if (this.filter.filterTarget(candidateInfo, dispatchingObject)) {
+          filteredList.add(candidateInfo);
+        }
+      }
+    } else {
+      filteredList = candidateList;
+      logger.debug("skipping the candidate filtering as the filter object is not specifed.");
+    }
 
-     logger.debug(String.format("candidate count after filtering: %s", filteredList.size()));
-     if (filteredList.size() == 0){
-       logger.debug("failed to select candidate as the filtered candidate list is empty.");
-       return null;
-     }
+    logger.debug(String.format("candidate count after filtering: %s", filteredList.size()));
+    if (filteredList.size() == 0) {
+      logger.debug("failed to select candidate as the filtered candidate list is empty.");
+      return null;
+    }
 
-     if (null == comparator){
-       logger.debug("candidate comparator is not specified, default hash code comparator class will be used.");
-     }
+    if (null == this.comparator) {
+      logger.debug(
+          "candidate comparator is not specified, default hash code comparator class will be used.");
+    }
 
-     // final work - find the best candidate from the filtered list.
-     K executor = Collections.max(filteredList, comparator);
-     logger.debug(String.format("candidate selected %s",
-         null == executor ? "(null)" : executor.toString()));
-     return executor;
+    // final work - find the best candidate from the filtered list.
+    final K executor = Collections.max(filteredList, this.comparator);
+    logger.debug(String.format("candidate selected %s",
+        null == executor ? "(null)" : executor.toString()));
+    return executor;
   }
 
   @Override
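
Putting the two pieces together, getBest() filters the candidates and then takes the maximum under the comparator. A hedged usage sketch, reusing the two toy classes from the previous sketches:

// Illustrative sketch only: selecting the longest non-empty string.
import azkaban.executor.selector.CandidateSelector;
import java.util.Arrays;

public class SelectorDemo {

  public static void main(String[] args) {
    final CandidateSelector<String, Object> selector =
        new CandidateSelector<>(new NonEmptyStringFilter(), new LengthComparator());

    // The empty string is filtered out; among the rest, the longest one wins.
    final String best = selector.getBest(Arrays.asList("", "abc", "abcdef"), null);
    System.out.println(best); // abcdef
  }
}
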
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorComparator.java b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorComparator.java
index 1ea7ecb..25cbfa7 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorComparator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorComparator.java
@@ -16,35 +16,27 @@
 
 package azkaban.executor.selector;
 
+import azkaban.executor.Executor;
+import azkaban.executor.ExecutorInfo;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import azkaban.executor.Executor;
-import azkaban.executor.ExecutorInfo;
-
 
 /**
- * De-normalized version of the CandidateComparator, which also contains the implementation of the factor comparators.
- * */
+ * De-normalized version of the CandidateComparator, which also contains the implementation of the
+ * factor comparators.
+ */
 public class ExecutorComparator extends CandidateComparator<Executor> {
-  private static Map<String, ComparatorCreator> comparatorCreatorRepository = null;
-
-  /**
-   * Gets the name list of all available comparators.
-   * @return the list of the names.
-   * */
-  public static Set<String> getAvailableComparatorNames(){
-    return comparatorCreatorRepository.keySet();
-  }
 
   // factor comparator names
   private static final String NUMOFASSIGNEDFLOW_COMPARATOR_NAME = "NumberOfAssignedFlowComparator";
   private static final String MEMORY_COMPARATOR_NAME = "Memory";
   private static final String LSTDISPATCHED_COMPARATOR_NAME = "LastDispatched";
   private static final String CPUUSAGE_COMPARATOR_NAME = "CpuUsage";
+  private static Map<String, ComparatorCreator> comparatorCreatorRepository = null;
 
   /**
    * static initializer of the class.
@@ -59,81 +51,91 @@ public class ExecutorComparator extends CandidateComparator<Executor> {
         ExecutorComparator::getNumberOfAssignedFlowComparator);
 
     // register the creator for memory comparator.
-    comparatorCreatorRepository.put(MEMORY_COMPARATOR_NAME, ExecutorComparator::getMemoryComparator);
+    comparatorCreatorRepository
+        .put(MEMORY_COMPARATOR_NAME, ExecutorComparator::getMemoryComparator);
 
     // register the creator for last dispatched time comparator.
-    comparatorCreatorRepository.put(LSTDISPATCHED_COMPARATOR_NAME, ExecutorComparator::getLstDispatchedTimeComparator);
+    comparatorCreatorRepository
+        .put(LSTDISPATCHED_COMPARATOR_NAME, ExecutorComparator::getLstDispatchedTimeComparator);
 
     // register the creator for CPU Usage comparator.
-    comparatorCreatorRepository.put(CPUUSAGE_COMPARATOR_NAME, ExecutorComparator::getCpuUsageComparator);
+    comparatorCreatorRepository
+        .put(CPUUSAGE_COMPARATOR_NAME, ExecutorComparator::getCpuUsageComparator);
   }
 
-
   /**
    * constructor of the ExecutorComparator.
-   * @param comparatorList   the list of comparator, plus its weight information to be registered,
-   *  the parameter must be a not-empty and valid list object.
-   * */
-  public ExecutorComparator(Map<String,Integer> comparatorList) {
-    if (null == comparatorList|| comparatorList.size() == 0){
+   *
+   * @param comparatorList the list of comparators, plus their weight information, to be registered;
+   * the parameter must be a non-empty and valid list object.
+   */
+  public ExecutorComparator(final Map<String, Integer> comparatorList) {
+    if (null == comparatorList || comparatorList.size() == 0) {
       throw new IllegalArgumentException("failed to initialize executor comparator" +
-                                         "as the passed comparator list is invalid or empty.");
+          "as the passed comparator list is invalid or empty.");
     }
 
     // register the comparators; we will not throw here if the weight is invalid, as it is handled in the super.
-    for (Entry<String,Integer> entry : comparatorList.entrySet()){
-      if (comparatorCreatorRepository.containsKey(entry.getKey())){
+    for (final Entry<String, Integer> entry : comparatorList.entrySet()) {
+      if (comparatorCreatorRepository.containsKey(entry.getKey())) {
         this.registerFactorComparator(comparatorCreatorRepository.
             get(entry.getKey()).
             create(entry.getValue()));
       } else {
-        throw new IllegalArgumentException(String.format("failed to initialize executor comparator " +
-                                        "as the comparator implementation for requested factor '%s' doesn't exist.",
-                                        entry.getKey()));
+        throw new IllegalArgumentException(
+            String.format("failed to initialize executor comparator " +
+                    "as the comparator implementation for requested factor '%s' doesn't exist.",
+                entry.getKey()));
       }
     }
   }
 
-  @Override
-  public String getName() {
-    return "ExecutorComparator";
-  }
-
-  private interface ComparatorCreator{
-    FactorComparator<Executor> create(int weight);
+  /**
+   * Gets the name list of all available comparators.
+   *
+   * @return the list of the names.
+   */
+  public static Set<String> getAvailableComparatorNames() {
+    return comparatorCreatorRepository.keySet();
   }
 
-  /**<pre>
-   * helper function that does the object  on two statistics, comparator can leverage this function to provide
+  /**
+   * <pre>
+   * helper function that does the object check on two statistics; a comparator can leverage this
+   * function to provide
    * shortcuts if   the statistics object is missing from one or both sides of the executors.
    * </pre>
-   * @param stat1   the first statistics  object to be checked .
-   * @param stat2   the second statistics object to be checked.
-   * @param caller  the name of the calling function, for logging purpose.
-   * @return true if the passed statistics are NOT both valid, a shortcut can be made (caller can consume the result),
-   *         false otherwise.
-   * */
-  private static boolean statisticsObjectCheck(ExecutorInfo statisticsObj1, ExecutorInfo statisticsObj2, String caller){
+   *
+   * @param stat1 the first statistics  object to be checked .
+   * @param stat2 the second statistics object to be checked.
+   * @param caller the name of the calling function, for logging purpose.
+   * @return true if the passed statistics are NOT both valid, a shortcut can be made (caller can
+   * consume the result), false otherwise.
+   */
+  private static boolean statisticsObjectCheck(final ExecutorInfo statisticsObj1,
+      final ExecutorInfo statisticsObj2, final String caller) {
     // both doesn't expose the info
-    if (null == statisticsObj1 && null == statisticsObj2){
+    if (null == statisticsObj1 && null == statisticsObj2) {
       logger.debug(String.format("%s : neither of the executors exposed statistics info.",
           caller));
       return true;
     }
 
     //right side doesn't expose the info.
-    if (null == statisticsObj2 ){
-        logger.debug(String.format("%s : choosing left side and the right side executor doesn't expose statistics info",
-            caller));
+    if (null == statisticsObj2) {
+      logger.debug(String.format(
+          "%s : choosing left side and the right side executor doesn't expose statistics info",
+          caller));
       return true;
     }
 
     //left side doesn't expose the info.
-    if (null == statisticsObj1 ){
-      logger.debug(String.format("%s : choosing right side and the left side executor doesn't expose statistics info",
+    if (null == statisticsObj1) {
+      logger.debug(String.format(
+          "%s : choosing right side and the left side executor doesn't expose statistics info",
           caller));
       return true;
-      }
+    }
 
     // both not null
     return false;
@@ -141,98 +143,116 @@ public class ExecutorComparator extends CandidateComparator<Executor> {
 
   /**
    * function defines the number of assigned flow comparator.
+   *
    * @param weight weight of the comparator.
-   * */
-  private static FactorComparator<Executor> getNumberOfAssignedFlowComparator(int weight){
-    return FactorComparator.create(NUMOFASSIGNEDFLOW_COMPARATOR_NAME, weight, new Comparator<Executor>(){
-
-      @Override
-      public int compare(Executor o1, Executor o2) {
-        ExecutorInfo stat1 = o1.getExecutorInfo();
-        ExecutorInfo stat2 = o2.getExecutorInfo();
-
-        Integer result = 0;
-        if (statisticsObjectCheck(stat1,stat2,NUMOFASSIGNEDFLOW_COMPARATOR_NAME)){
-          return result;
-        }
-        return ((Integer)stat1.getRemainingFlowCapacity()).compareTo(stat2.getRemainingFlowCapacity());
-      }});
+   */
+  private static FactorComparator<Executor> getNumberOfAssignedFlowComparator(final int weight) {
+    return FactorComparator
+        .create(NUMOFASSIGNEDFLOW_COMPARATOR_NAME, weight, new Comparator<Executor>() {
+
+          @Override
+          public int compare(final Executor o1, final Executor o2) {
+            final ExecutorInfo stat1 = o1.getExecutorInfo();
+            final ExecutorInfo stat2 = o2.getExecutorInfo();
+
+            final Integer result = 0;
+            if (statisticsObjectCheck(stat1, stat2, NUMOFASSIGNEDFLOW_COMPARATOR_NAME)) {
+              return result;
+            }
+            return ((Integer) stat1.getRemainingFlowCapacity())
+                .compareTo(stat2.getRemainingFlowCapacity());
+          }
+        });
   }
 
   /**
    * function defines the cpuUsage comparator.
+   *
    * @param weight weight of the comparator.
-   * @return
-   * */
-  private static FactorComparator<Executor> getCpuUsageComparator(int weight){
-    return FactorComparator.create(CPUUSAGE_COMPARATOR_NAME, weight, new Comparator<Executor>(){
+   */
+  private static FactorComparator<Executor> getCpuUsageComparator(final int weight) {
+    return FactorComparator.create(CPUUSAGE_COMPARATOR_NAME, weight, new Comparator<Executor>() {
 
       @Override
-      public int compare(Executor o1, Executor o2) {
-        ExecutorInfo stat1 = o1.getExecutorInfo();
-        ExecutorInfo stat2 = o2.getExecutorInfo();
+      public int compare(final Executor o1, final Executor o2) {
+        final ExecutorInfo stat1 = o1.getExecutorInfo();
+        final ExecutorInfo stat2 = o2.getExecutorInfo();
 
-        int result = 0;
-        if (statisticsObjectCheck(stat1,stat2,CPUUSAGE_COMPARATOR_NAME)){
+        final int result = 0;
+        if (statisticsObjectCheck(stat1, stat2, CPUUSAGE_COMPARATOR_NAME)) {
           return result;
         }
 
         // CPU usage , the lesser the value is, the better.
-        return ((Double)stat2.getCpuUsage()).compareTo(stat1.getCpuUsage());
-      }});
+        return ((Double) stat2.getCpuUsage()).compareTo(stat1.getCpuUsage());
+      }
+    });
   }
 
-
   /**
    * function defines the last dispatched time comparator.
+   *
    * @param weight weight of the comparator.
-   * @return
-   * */
-  private static FactorComparator<Executor> getLstDispatchedTimeComparator(int weight){
-    return FactorComparator.create(LSTDISPATCHED_COMPARATOR_NAME, weight, new Comparator<Executor>(){
-
-      @Override
-      public int compare(Executor o1, Executor o2) {
-        ExecutorInfo stat1 = o1.getExecutorInfo();
-        ExecutorInfo stat2 = o2.getExecutorInfo();
-
-        int result = 0;
-        if (statisticsObjectCheck(stat1,stat2,LSTDISPATCHED_COMPARATOR_NAME)){
-          return result;
-        }
-        // Note: an earlier date time indicates higher weight.
-        return ((Long)stat2.getLastDispatchedTime()).compareTo(stat1.getLastDispatchedTime());
-      }});
+   */
+  private static FactorComparator<Executor> getLstDispatchedTimeComparator(final int weight) {
+    return FactorComparator
+        .create(LSTDISPATCHED_COMPARATOR_NAME, weight, new Comparator<Executor>() {
+
+          @Override
+          public int compare(final Executor o1, final Executor o2) {
+            final ExecutorInfo stat1 = o1.getExecutorInfo();
+            final ExecutorInfo stat2 = o2.getExecutorInfo();
+
+            final int result = 0;
+            if (statisticsObjectCheck(stat1, stat2, LSTDISPATCHED_COMPARATOR_NAME)) {
+              return result;
+            }
+            // Note: an earlier date time indicates higher weight.
+            return ((Long) stat2.getLastDispatchedTime()).compareTo(stat1.getLastDispatchedTime());
+          }
+        });
   }
 
-
-  /**<pre>
+  /**
+   * <pre>
    * function defines the Memory comparator.
-   * Note: comparator firstly take the absolute value of the remaining memory, if both sides have the same value,
+   * Note: the comparator first takes the absolute value of the remaining memory; if both sides have
+   * the same value,
    *       it goes further to check the percent of the remaining memory.
    * </pre>
+   *
    * @param weight weight of the comparator.
-
-   * @return
-   * */
-  private static FactorComparator<Executor> getMemoryComparator(int weight){
-    return FactorComparator.create(MEMORY_COMPARATOR_NAME, weight, new Comparator<Executor>(){
+   */
+  private static FactorComparator<Executor> getMemoryComparator(final int weight) {
+    return FactorComparator.create(MEMORY_COMPARATOR_NAME, weight, new Comparator<Executor>() {
 
       @Override
-      public int compare(Executor o1, Executor o2) {
-       ExecutorInfo stat1 = o1.getExecutorInfo();
-       ExecutorInfo stat2 = o2.getExecutorInfo();
+      public int compare(final Executor o1, final Executor o2) {
+        final ExecutorInfo stat1 = o1.getExecutorInfo();
+        final ExecutorInfo stat2 = o2.getExecutorInfo();
+
+        final int result = 0;
+        if (statisticsObjectCheck(stat1, stat2, MEMORY_COMPARATOR_NAME)) {
+          return result;
+        }
+
+        if (stat1.getRemainingMemoryInMB() != stat2.getRemainingMemoryInMB()) {
+          return stat1.getRemainingMemoryInMB() > stat2.getRemainingMemoryInMB() ? 1 : -1;
+        }
+
+        return Double.compare(stat1.getRemainingMemoryPercent(), stat2.getRemainingMemoryPercent());
+      }
+    });
+  }
+
+  @Override
+  public String getName() {
+    return "ExecutorComparator";
+  }
 
-       int result = 0;
-       if (statisticsObjectCheck(stat1,stat2,MEMORY_COMPARATOR_NAME)){
-         return result;
-       }
 
-       if (stat1.getRemainingMemoryInMB() != stat2.getRemainingMemoryInMB()){
-         return stat1.getRemainingMemoryInMB() > stat2.getRemainingMemoryInMB() ? 1:-1;
-       }
+  private interface ComparatorCreator {
 
-       return Double.compare(stat1.getRemainingMemoryPercent(), stat2.getRemainingMemoryPercent());
-      }});
+    FactorComparator<Executor> create(int weight);
   }
 }
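
The constructor takes a factor-name to weight map and rejects unknown names. A hedged sketch using the four factor names defined above (the weights here are arbitrary):

// Illustrative sketch only: configuring an ExecutorComparator with arbitrary weights.
import azkaban.executor.selector.ExecutorComparator;
import java.util.HashMap;
import java.util.Map;

public class ComparatorConfigDemo {

  public static void main(String[] args) {
    final Map<String, Integer> weights = new HashMap<>();
    weights.put("NumberOfAssignedFlowComparator", 2);
    weights.put("Memory", 1);
    weights.put("LastDispatched", 1);
    weights.put("CpuUsage", 1);

    final ExecutorComparator comparator = new ExecutorComparator(weights);
    System.out.println(comparator.getName()); // ExecutorComparator
    System.out.println(ExecutorComparator.getAvailableComparatorNames());
  }
}
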
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorFilter.java b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorFilter.java
index 1598d27..18260d9 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorFilter.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorFilter.java
@@ -16,34 +16,25 @@
 
 package azkaban.executor.selector;
 
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.Executor;
+import azkaban.executor.ExecutorInfo;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
-import azkaban.executor.ExecutableFlow;
-import azkaban.executor.Executor;
-import azkaban.executor.ExecutorInfo;
-
 /**
- * De-normalized version of the candidateFilter, which also contains the implementation of the factor filters.
- * */
+ * De-normalized version of the candidateFilter, which also contains the implementation of the
+ * factor filters.
+ */
 public final class ExecutorFilter extends CandidateFilter<Executor, ExecutableFlow> {
-  private static Map<String, FactorFilter<Executor, ExecutableFlow>> filterRepository = null;
-
-  /**
-   * Gets the name list of all available filters.
-   * @return the list of the names.
-   * */
-  public static Set<String> getAvailableFilterNames(){
-    return filterRepository.keySet();
-  }
-
 
   // factor filter names.
   private static final String STATICREMAININGFLOWSIZE_FILTER_NAME = "StaticRemainingFlowSize";
   private static final String MINIMUMFREEMEMORY_FILTER_NAME = "MinimumFreeMemory";
   private static final String CPUSTATUS_FILTER_NAME = "CpuStatus";
+  private static Map<String, FactorFilter<Executor, ExecutableFlow>> filterRepository = null;
 
   /**<pre>
    * static initializer of the class.
@@ -60,117 +51,142 @@ public final class ExecutorFilter extends CandidateFilter<Executor, ExecutableFl
 
   /**
    * constructor of the ExecutorFilter.
-   * @param filterList   the list of filter to be registered, the parameter must be a not-empty and valid list object.
-   * */
-  public ExecutorFilter(Collection<String> filterList) {
+   *
+   * @param filterList the list of filters to be registered; the parameter must be a non-empty and
+   * valid list object.
+   */
+  public ExecutorFilter(final Collection<String> filterList) {
     // shortcut if the filter list is invalid. A little bit ugly to have to throw in constructor.
-    if (null == filterList || filterList.size() == 0){
-      logger.error("failed to initialize executor filter as the passed filter list is invalid or empty.");
+    if (null == filterList || filterList.size() == 0) {
+      logger.error(
+          "failed to initialize executor filter as the passed filter list is invalid or empty.");
       throw new IllegalArgumentException("filterList");
     }
 
     // register the filters according to the list.
-    for (String filterName : filterList){
-      if (filterRepository.containsKey(filterName)){
+    for (final String filterName : filterList) {
+      if (filterRepository.containsKey(filterName)) {
         this.registerFactorFilter(filterRepository.get(filterName));
       } else {
-        logger.error(String.format("failed to initialize executor filter "+
-                                   "as the filter implementation for requested factor '%s' doesn't exist.",
-                                   filterName));
+        logger.error(String.format("failed to initialize executor filter " +
+                "as the filter implementation for requested factor '%s' doesn't exist.",
+            filterName));
         throw new IllegalArgumentException("filterList");
       }
     }
   }
 
-  @Override
-  public String getName() {
-    return "ExecutorFilter";
+  /**
+   * Gets the name list of all available filters.
+   *
+   * @return the list of the names.
+   */
+  public static Set<String> getAvailableFilterNames() {
+    return filterRepository.keySet();
   }
 
-  /**<pre>
+  /**
+   * <pre>
    * function to register the static remaining flow size filter.
-   * NOTE : this is a static filter which means the filter will be filtering based on the system standard which is not
+   * NOTE : this is a static filter, which means the filter will be filtering based on the system
+   * standard, which is not
    *        coming from the passed flow.
-   *        Ideally this filter will make sure only the executor hasn't reached the Max allowed # of executing flows.
-   *</pre>
-   * */
-  private static FactorFilter<Executor, ExecutableFlow> getStaticRemainingFlowSizeFilter(){
-    return FactorFilter.create(STATICREMAININGFLOWSIZE_FILTER_NAME, (filteringTarget, referencingObject) -> {
-      if (null == filteringTarget){
-        logger.debug(String.format("%s : filtering out the target as it is null.", STATICREMAININGFLOWSIZE_FILTER_NAME));
-        return false;
-      }
-
-      ExecutorInfo stats = filteringTarget.getExecutorInfo();
-      if (null == stats) {
-        logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
-            STATICREMAININGFLOWSIZE_FILTER_NAME,
-            filteringTarget.toString()));
-        return false;
-      }
-      return stats.getRemainingFlowCapacity() > 0 ;
-     });
+   *        Ideally this filter will make sure only executors that haven't reached the max allowed #
+   * of executing flows pass.
+   * </pre>
+   */
+  private static FactorFilter<Executor, ExecutableFlow> getStaticRemainingFlowSizeFilter() {
+    return FactorFilter
+        .create(STATICREMAININGFLOWSIZE_FILTER_NAME, (filteringTarget, referencingObject) -> {
+          if (null == filteringTarget) {
+            logger.debug(String.format("%s : filtering out the target as it is null.",
+                STATICREMAININGFLOWSIZE_FILTER_NAME));
+            return false;
+          }
+
+          final ExecutorInfo stats = filteringTarget.getExecutorInfo();
+          if (null == stats) {
+            logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
+                STATICREMAININGFLOWSIZE_FILTER_NAME,
+                filteringTarget.toString()));
+            return false;
+          }
+          return stats.getRemainingFlowCapacity() > 0;
+        });
   }
 
-  /**<pre>
+  /**
+   * <pre>
    * function to register the static Minimum Reserved Memory filter.
-   * NOTE : this is a static filter which means the filter will be filtering based on the system standard which is not
+   * NOTE : this is a static filter, which means the filter will be filtering based on the system
+   * standard, which is not
    *        coming from the passed flow.
    *        This filter will filter out any executors that have remaining memory below 6G
-   *</pre>
-   * */
-  private static FactorFilter<Executor, ExecutableFlow> getMinimumReservedMemoryFilter(){
-    return FactorFilter.create(MINIMUMFREEMEMORY_FILTER_NAME, new FactorFilter.Filter<Executor, ExecutableFlow>() {
-      private static final int MINIMUM_FREE_MEMORY = 6 * 1024;
-
-      @Override
-      public boolean filterTarget(Executor filteringTarget, ExecutableFlow referencingObject) {
-        if (null == filteringTarget){
-          logger.debug(String.format("%s : filtering out the target as it is null.", MINIMUMFREEMEMORY_FILTER_NAME));
-          return false;
-        }
-
-        ExecutorInfo stats = filteringTarget.getExecutorInfo();
-        if (null == stats) {
-          logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
-              MINIMUMFREEMEMORY_FILTER_NAME,
-              filteringTarget.toString()));
-          return false;
-        }
-        return stats.getRemainingMemoryInMB() > MINIMUM_FREE_MEMORY ;
-       }
-    });
+   * </pre>
+   */
+  private static FactorFilter<Executor, ExecutableFlow> getMinimumReservedMemoryFilter() {
+    return FactorFilter
+        .create(MINIMUMFREEMEMORY_FILTER_NAME, new FactorFilter.Filter<Executor, ExecutableFlow>() {
+          private static final int MINIMUM_FREE_MEMORY = 6 * 1024;
+
+          @Override
+          public boolean filterTarget(final Executor filteringTarget,
+              final ExecutableFlow referencingObject) {
+            if (null == filteringTarget) {
+              logger.debug(String.format("%s : filtering out the target as it is null.",
+                  MINIMUMFREEMEMORY_FILTER_NAME));
+              return false;
+            }
+
+            final ExecutorInfo stats = filteringTarget.getExecutorInfo();
+            if (null == stats) {
+              logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
+                  MINIMUMFREEMEMORY_FILTER_NAME,
+                  filteringTarget.toString()));
+              return false;
+            }
+            return stats.getRemainingMemoryInMB() > MINIMUM_FREE_MEMORY;
+          }
+        });
   }
 
-
   /**
    * <pre>
    * function to register the static Minimum Reserved Memory filter.
-   * NOTE :  this is a static filter which means the filter will be filtering based on the system standard which
+   * NOTE :  this is a static filter, which means the filter will be filtering based on the system
+   * standard, which
    *        is not coming from the passed flow.
    *        This filter will filter out any executors whose current CPU usage exceeds 95%
    * </pre>
-   * */
-  private static FactorFilter<Executor, ExecutableFlow> getCpuStatusFilter(){
-    return FactorFilter.create(CPUSTATUS_FILTER_NAME, new FactorFilter.Filter<Executor, ExecutableFlow>() {
-      private static final int MAX_CPU_CURRENT_USAGE = 95;
-
-      @Override
-      public boolean filterTarget(Executor filteringTarget, ExecutableFlow referencingObject) {
-        if (null == filteringTarget){
-          logger.debug(String.format("%s : filtering out the target as it is null.", CPUSTATUS_FILTER_NAME));
-          return false;
-        }
-
-        ExecutorInfo stats = filteringTarget.getExecutorInfo();
-        if (null == stats) {
-          logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
-              CPUSTATUS_FILTER_NAME,
-              filteringTarget.toString()));
-          return false;
-        }
-        return stats.getCpuUsage() < MAX_CPU_CURRENT_USAGE ;
-       }
-    });
+   */
+  private static FactorFilter<Executor, ExecutableFlow> getCpuStatusFilter() {
+    return FactorFilter
+        .create(CPUSTATUS_FILTER_NAME, new FactorFilter.Filter<Executor, ExecutableFlow>() {
+          private static final int MAX_CPU_CURRENT_USAGE = 95;
+
+          @Override
+          public boolean filterTarget(final Executor filteringTarget,
+              final ExecutableFlow referencingObject) {
+            if (null == filteringTarget) {
+              logger.debug(String
+                  .format("%s : filtering out the target as it is null.", CPUSTATUS_FILTER_NAME));
+              return false;
+            }
+
+            final ExecutorInfo stats = filteringTarget.getExecutorInfo();
+            if (null == stats) {
+              logger.debug(String.format("%s : filtering out %s as it's stats is unavailable.",
+                  CPUSTATUS_FILTER_NAME,
+                  filteringTarget.toString()));
+              return false;
+            }
+            return stats.getCpuUsage() < MAX_CPU_CURRENT_USAGE;
+          }
+        });
+  }
+
+  @Override
+  public String getName() {
+    return "ExecutorFilter";
   }
 }
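
Similarly, the ExecutorFilter constructor takes the factor names registered in its static initializer and throws on unknown names. A small sketch:

// Illustrative sketch only: configuring an ExecutorFilter with the three built-in factors.
import azkaban.executor.selector.ExecutorFilter;
import java.util.Arrays;

public class FilterConfigDemo {

  public static void main(String[] args) {
    final ExecutorFilter filter = new ExecutorFilter(
        Arrays.asList("StaticRemainingFlowSize", "MinimumFreeMemory", "CpuStatus"));
    System.out.println(filter.getName()); // ExecutorFilter
    System.out.println(ExecutorFilter.getAvailableFilterNames());
  }
}
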
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorSelector.java b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorSelector.java
index 2405c28..6089104 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorSelector.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/ExecutorSelector.java
@@ -16,30 +16,33 @@
 
 package azkaban.executor.selector;
 
-import java.util.Collection;
-import java.util.Map;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.Executor;
+import java.util.Collection;
+import java.util.Map;
 
-/**<pre>
+/**
+ * <pre>
  * Executor selector class implementation.
  * NOTE: This class is a de-generalized version of the CandidateSelector, which provides a
  *       clean and convenient constructor to take in filter and comparator name list and build
  *       the instance from that.
- *</pre>
- * */
+ * </pre>
+ */
 public class ExecutorSelector extends CandidateSelector<Executor, ExecutableFlow> {
 
   /**
    * Constructor of the class.
-   * @param filterList      name list of the filters to be registered,
-   *                        filter feature will be disabled if a null value is passed.
-   * @param comparatorList  name/weight pair list of the comparators to be registered ,
-   *                        again comparator feature is disabled if a null value is passed.
-   * */
-  public ExecutorSelector(Collection<String> filterList, Map<String,Integer> comparatorList) {
-    super(null == filterList || filterList.isEmpty() ?         null : new ExecutorFilter(filterList),
-          null == comparatorList || comparatorList.isEmpty() ? null : new ExecutorComparator(comparatorList));
+   *
+   * @param filterList name list of the filters to be registered; the filter feature will be disabled
+   * if a null value is passed.
+   * @param comparatorList name/weight pair list of the comparators to be registered; again, the
+   * comparator feature is disabled if a null value is passed.
+   */
+  public ExecutorSelector(final Collection<String> filterList,
+      final Map<String, Integer> comparatorList) {
+    super(null == filterList || filterList.isEmpty() ? null : new ExecutorFilter(filterList),
+        null == comparatorList || comparatorList.isEmpty() ? null
+            : new ExecutorComparator(comparatorList));
   }
 }
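
A hedged sketch of how a caller might build a selector from name lists; passing null (or an empty collection) for either argument disables that feature, as the constructor shows. The helper method, factor choices and weights here are invented:

// Illustrative sketch only: one way to build and use an ExecutorSelector.
import azkaban.executor.ExecutableFlow;
import azkaban.executor.Executor;
import azkaban.executor.selector.ExecutorSelector;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class ExecutorSelectorDemo {

  public static Executor pick(final Collection<Executor> executors, final ExecutableFlow flow) {
    final Map<String, Integer> weights = new HashMap<>();
    weights.put("Memory", 1);
    weights.put("CpuUsage", 1);

    final ExecutorSelector selector = new ExecutorSelector(
        Arrays.asList("StaticRemainingFlowSize", "CpuStatus"), weights);
    return selector.getBest(executors, flow);
  }
}
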
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/FactorComparator.java b/azkaban-common/src/main/java/azkaban/executor/selector/FactorComparator.java
index 6d4d2e0..e1cfab2 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/FactorComparator.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/FactorComparator.java
@@ -19,58 +19,67 @@ package azkaban.executor.selector;
 import java.util.Comparator;
 import org.apache.log4j.Logger;
 
-/** wrapper class for a factor comparator .
- *@param T: the type of the objects to be compared.
+/**
+ * Wrapper class for a factor comparator.
+ *
+ * @param T: the type of the objects to be compared.
  */
-public final class FactorComparator<T>{
-  private static Logger logger = Logger.getLogger(CandidateComparator.class);
+public final class FactorComparator<T> {
+
+  private static final Logger logger = Logger.getLogger(CandidateComparator.class);
 
-  private String factorName;
+  private final String factorName;
+  private final Comparator<T> comparator;
   private int weight;
-  private Comparator<T> comparator;
 
-  /** private constructor of the class. User will create the instance of the class by calling the static
-   *  method provided below.
+  /**
+   * private constructor of the class. User will create the instance of the class by calling the
+   * static method provided below.
+   *
    * @param factorName : the factor name .
    * @param weight : the weight of the comparator.
    * @param comparator : function to be provided by user on how the comparison should be made.
-   * */
-  private FactorComparator(String factorName, int weight, Comparator<T> comparator){
+   */
+  private FactorComparator(final String factorName, final int weight,
+      final Comparator<T> comparator) {
     this.factorName = factorName;
     this.weight = weight;
     this.comparator = comparator;
   }
 
-  /** static function to generate an instance of the class.
-   *  refer to the constructor for the param definitions.
-   * */
-  public static <T> FactorComparator<T> create(String factorName, int weight, Comparator<T> comparator){
+  /**
+   * static function to generate an instance of the class.
+   * refer to the constructor for the param definitions.
+   */
+  public static <T> FactorComparator<T> create(final String factorName, final int weight,
+      final Comparator<T> comparator) {
 
-    if (null == factorName || factorName.length() == 0 || weight < 0 || null == comparator){
-      logger.error("failed to create instance of FactorComparator, at least one of the input paramters are invalid");
+    if (null == factorName || factorName.length() == 0 || weight < 0 || null == comparator) {
+      logger.error(
+          "failed to create instance of FactorComparator, at least one of the input paramters are invalid");
       return null;
     }
 
-    return new FactorComparator<T>(factorName,weight,comparator);
+    return new FactorComparator<>(factorName, weight, comparator);
   }
 
   // function to return the factor name.
-  public String getFactorName(){
+  public String getFactorName() {
     return this.factorName;
   }
 
   // function to return the weight value.
-  public int getWeight(){
+  public int getWeight() {
     return this.weight;
   }
 
   // function to return the weight value.
-  public void updateWeight(int value){
+  public void updateWeight(final int value) {
     this.weight = value;
   }
 
   // the actual compare function, which will leverage the user defined function.
-  public int compare(T object1, T object2){
+  public int compare(final T object1, final T object2) {
     return this.comparator.compare(object1, object2);
   }
 }
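
FactorComparator.create() returns null (after logging an error) when the name is empty, the weight is negative, or the comparator is missing, so callers should treat the result as nullable. A short sketch with an invented factor name and weight:

// Illustrative sketch only: a standalone FactorComparator over integers.
import azkaban.executor.selector.FactorComparator;

public class FactorComparatorDemo {

  public static void main(String[] args) {
    final FactorComparator<Integer> byValue =
        FactorComparator.create("Value", 5, Integer::compare);

    System.out.println(byValue.getFactorName()); // Value
    System.out.println(byValue.getWeight());     // 5
    System.out.println(byValue.compare(3, 7));   // negative: the right side wins this factor
  }
}
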
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/FactorFilter.java b/azkaban-common/src/main/java/azkaban/executor/selector/FactorFilter.java
index 3cc0154..97fb89f 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/FactorFilter.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/FactorFilter.java
@@ -18,58 +18,70 @@ package azkaban.executor.selector;
 
 import org.apache.log4j.Logger;
 
-/** wrapper class for a factor Filter .
- *@param T: the type of the objects to be compared.
- *@param V: the type of the object to be used for filtering.
+/**
+ * Wrapper class for a factor filter.
+ *
+ * @param T: the type of the objects to be compared.
+ * @param V: the type of the object to be used for filtering.
  */
-public final class FactorFilter<T,V>{
-  private static Logger logger = Logger.getLogger(FactorFilter.class);
+public final class FactorFilter<T, V> {
+
+  private static final Logger logger = Logger.getLogger(FactorFilter.class);
 
-  private String factorName;
-  private Filter<T,V> filter;
+  private final String factorName;
+  private final Filter<T, V> filter;
 
-  /** private constructor of the class. User will create the instance of the class by calling the static
-   *  method provided below.
+  /**
+   * private constructor of the class. User will create the instance of the class by calling the
+   * static method provided below.
+   *
    * @param factorName : the factor name .
    * @param filter : user defined function specifying how the filtering should be implemented.
-   * */
-  private FactorFilter(String factorName, Filter<T,V> filter){
+   */
+  private FactorFilter(final String factorName, final Filter<T, V> filter) {
     this.factorName = factorName;
     this.filter = filter;
   }
 
-  /** static function to generate an instance of the class.
-   *  refer to the constructor for the param definitions.
-   * */
-  public static <T,V> FactorFilter<T,V> create(String factorName, Filter<T,V> filter){
+  /**
+   * static function to generate an instance of the class.
+   * refer to the constructor for the param definitions.
+   */
+  public static <T, V> FactorFilter<T, V> create(final String factorName,
+      final Filter<T, V> filter) {
 
-    if (null == factorName || factorName.length() == 0 || null == filter){
-      logger.error("failed to create instance of FactorFilter, at least one of the input paramters are invalid");
+    if (null == factorName || factorName.length() == 0 || null == filter) {
+      logger.error(
+          "failed to create instance of FactorFilter, at least one of the input paramters are invalid");
       return null;
     }
 
-    return new FactorFilter<T,V>(factorName,filter);
+    return new FactorFilter<>(factorName, filter);
   }
 
   // function to return the factor name.
-  public String getFactorName(){
+  public String getFactorName() {
     return this.factorName;
   }
 
   // the actual check function, which will leverage the logic defined by user.
-  public boolean filterTarget(T filteringTarget, V referencingObject){
+  public boolean filterTarget(final T filteringTarget, final V referencingObject) {
     return this.filter.filterTarget(filteringTarget, referencingObject);
   }
 
   // interface of the filter.
-  public interface Filter<T,V>{
+  public interface Filter<T, V> {
 
-    /**function to analyze the target item according to the reference object to decide whether the item should be filtered.
-     * @param filteringTarget   object to be checked.
-     * @param referencingObject object which contains statistics based on which a decision is made whether
-     *                      the object being checked need to be filtered or not.
-     * @return true if the check passed, false if check failed, which means the item need to be filtered.
-     * */
+    /**
+     * function to analyze the target item according to the reference object to decide whether the
+     * item should be filtered.
+     *
+     * @param filteringTarget object to be checked.
+     * @param referencingObject object which contains statistics based on which a decision is made
+     * whether the object being checked needs to be filtered or not.
+     * @return true if the check passed, false if the check failed, which means the item needs to be
+     * filtered.
+     */
     boolean filterTarget(T filteringTarget, V referencingObject);
   }
 }
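
Filter is a single-method interface, so factors can be supplied as lambdas, exactly as ExecutorFilter does for its static remaining-flow-size factor. A minimal sketch with an invented rule:

// Illustrative sketch only: a FactorFilter built from a lambda.
import azkaban.executor.selector.FactorFilter;

public class FactorFilterDemo {

  public static void main(String[] args) {
    // Keep targets that are at least as large as the referencing value.
    final FactorFilter<Integer, Integer> atLeast =
        FactorFilter.create("AtLeastReference", (target, reference) -> target >= reference);

    System.out.println(atLeast.filterTarget(10, 5)); // true
    System.out.println(atLeast.filterTarget(3, 5));  // false
  }
}
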
diff --git a/azkaban-common/src/main/java/azkaban/executor/selector/Selector.java b/azkaban-common/src/main/java/azkaban/executor/selector/Selector.java
index a56b41a..06f1ab4 100644
--- a/azkaban-common/src/main/java/azkaban/executor/selector/Selector.java
+++ b/azkaban-common/src/main/java/azkaban/executor/selector/Selector.java
@@ -19,25 +19,32 @@ package azkaban.executor.selector;
 import java.util.Collection;
 
 
-/**<pre>
+/**
+ * <pre>
  *  Definition of the selector interface.
  *  an implementation of the selector interface provides the functionality
  *  to return a candidate from the candidateList that suits best for the dispatchingObject.
  * </pre>
- *  @param K : type of the candidate.
- *  @param V : type of the dispatching object.
+ *
+ * @param K : type of the candidate.
+ * @param V : type of the dispatching object.
  */
-public interface Selector <K extends Comparable<K>,V> {
+public interface Selector<K extends Comparable<K>, V> {
 
-  /** Function returns the next best suit candidate from the candidateList for the dispatching object.
-   *  @param  candidateList : List of the candidates to select from .
-   *  @param  dispatchingObject : the object to be dispatched .
-   *  @return candidate from the candidate list that suits best for the dispatching object.
-   * */
+  /**
+   * Function returns the next best-suited candidate from the candidateList for the dispatching
+   * object.
+   *
+   * @param candidateList : list of the candidates to select from.
+   * @param dispatchingObject : the object to be dispatched.
+   * @return the candidate from the candidate list that best suits the dispatching object.
+   */
   public K getBest(Collection<K> candidateList, V dispatchingObject);
 
-  /** Function returns the name of the current Dispatcher
-   *  @return name of the dispatcher.
-   * */
+  /**
+   * Function returns the name of the current Dispatcher
+   *
+   * @return name of the dispatcher.
+   */
   public String getName();
 }
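
Any class that can pick one candidate for a dispatching object can implement this interface; CandidateSelector above is the main implementation. A minimal alternative sketch (not part of this change) that just uses natural ordering:

// Illustrative sketch only: the smallest useful Selector, ignoring the dispatching object.
import azkaban.executor.selector.Selector;
import java.util.Collection;
import java.util.Collections;

public class NaturalOrderSelector<K extends Comparable<K>, V> implements Selector<K, V> {

  @Override
  public K getBest(final Collection<K> candidateList, final V dispatchingObject) {
    return candidateList == null || candidateList.isEmpty()
        ? null : Collections.max(candidateList);
  }

  @Override
  public String getName() {
    return "NaturalOrderSelector";
  }
}
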
diff --git a/azkaban-common/src/main/java/azkaban/executor/Status.java b/azkaban-common/src/main/java/azkaban/executor/Status.java
index 9df3f8e..26ae3a5 100644
--- a/azkaban-common/src/main/java/azkaban/executor/Status.java
+++ b/azkaban-common/src/main/java/azkaban/executor/Status.java
@@ -33,69 +33,69 @@ public enum Status {
 
   private final int numVal;
 
-  Status(int numVal) {
+  Status(final int numVal) {
     this.numVal = numVal;
   }
 
-  public int getNumVal() {
-    return numVal;
-  }
-
-  public static Status fromInteger(int x) {
+  public static Status fromInteger(final int x) {
     switch (x) {
-    case 10:
-      return READY;
-    case 20:
-      return PREPARING;
-    case 30:
-      return RUNNING;
-    case 40:
-      return PAUSED;
-    case 50:
-      return SUCCEEDED;
-    case 60:
-      return KILLED;
-    case 70:
-      return FAILED;
-    case 80:
-      return FAILED_FINISHING;
-    case 90:
-      return SKIPPED;
-    case 100:
-      return DISABLED;
-    case 110:
-      return QUEUED;
-    case 120:
-      return FAILED_SUCCEEDED;
-    case 130:
-      return CANCELLED;
-    default:
-      return READY;
+      case 10:
+        return READY;
+      case 20:
+        return PREPARING;
+      case 30:
+        return RUNNING;
+      case 40:
+        return PAUSED;
+      case 50:
+        return SUCCEEDED;
+      case 60:
+        return KILLED;
+      case 70:
+        return FAILED;
+      case 80:
+        return FAILED_FINISHING;
+      case 90:
+        return SKIPPED;
+      case 100:
+        return DISABLED;
+      case 110:
+        return QUEUED;
+      case 120:
+        return FAILED_SUCCEEDED;
+      case 130:
+        return CANCELLED;
+      default:
+        return READY;
     }
   }
 
-  public static boolean isStatusFinished(Status status) {
+  public static boolean isStatusFinished(final Status status) {
     switch (status) {
-    case FAILED:
-    case KILLED:
-    case SUCCEEDED:
-    case SKIPPED:
-    case FAILED_SUCCEEDED:
-    case CANCELLED:
-      return true;
-    default:
-      return false;
+      case FAILED:
+      case KILLED:
+      case SUCCEEDED:
+      case SKIPPED:
+      case FAILED_SUCCEEDED:
+      case CANCELLED:
+        return true;
+      default:
+        return false;
     }
   }
 
-  public static boolean isStatusRunning(Status status) {
+  public static boolean isStatusRunning(final Status status) {
     switch (status) {
-    case RUNNING:
-    case FAILED_FINISHING:
-    case QUEUED:
-      return true;
-    default:
-      return false;
+      case RUNNING:
+      case FAILED_FINISHING:
+      case QUEUED:
+        return true;
+      default:
+        return false;
     }
   }
+
+  public int getNumVal() {
+    return this.numVal;
+  }
 }
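
The numeric codes round-trip through fromInteger(), with unknown values falling back to READY. A quick sketch of the helpers:

// Illustrative sketch only: exercising the Status helpers.
import azkaban.executor.Status;

public class StatusDemo {

  public static void main(String[] args) {
    final Status s = Status.fromInteger(50);
    System.out.println(s);                          // SUCCEEDED
    System.out.println(s.getNumVal());              // 50
    System.out.println(Status.isStatusFinished(s)); // true
    System.out.println(Status.isStatusRunning(s));  // false
    System.out.println(Status.fromInteger(999));    // READY (default branch)
  }
}
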
diff --git a/azkaban-common/src/main/java/azkaban/flow/CommonJobProperties.java b/azkaban-common/src/main/java/azkaban/flow/CommonJobProperties.java
index 8c208a5..f99fd4f 100644
--- a/azkaban-common/src/main/java/azkaban/flow/CommonJobProperties.java
+++ b/azkaban-common/src/main/java/azkaban/flow/CommonJobProperties.java
@@ -133,7 +133,7 @@ public class CommonJobProperties {
    * hotspot occurs.
    */
   public static final String PROJECT_VERSION = "azkaban.flow.projectversion";
-  
+
   /**
    * Find out who is the submit user, in addition to the user.to.proxy (they may be different)
    */
diff --git a/azkaban-common/src/main/java/azkaban/flow/Edge.java b/azkaban-common/src/main/java/azkaban/flow/Edge.java
index 52b835f..56ddf2c 100644
--- a/azkaban-common/src/main/java/azkaban/flow/Edge.java
+++ b/azkaban-common/src/main/java/azkaban/flow/Edge.java
@@ -23,6 +23,7 @@ import java.util.List;
 import java.util.Map;
 
 public class Edge {
+
   private final String sourceId;
   private final String targetId;
   private Node source;
@@ -33,84 +34,117 @@ public class Edge {
   private String guideType;
   private List<Point2D> guideValues;
 
-  public Edge(String fromId, String toId) {
+  public Edge(final String fromId, final String toId) {
     this.sourceId = fromId;
     this.targetId = toId;
   }
 
-  public Edge(Edge clone) {
+  public Edge(final Edge clone) {
     this.sourceId = clone.getSourceId();
     this.targetId = clone.getTargetId();
     this.error = clone.getError();
   }
 
+  public static Edge fromObject(final Object obj) {
+    final HashMap<String, Object> edgeObj = (HashMap<String, Object>) obj;
+
+    final String source = (String) edgeObj.get("source");
+    final String target = (String) edgeObj.get("target");
+
+    final String error = (String) edgeObj.get("error");
+
+    final Edge edge = new Edge(source, target);
+    edge.setError(error);
+
+    if (edgeObj.containsKey("guides")) {
+      final Map<String, Object> guideMap =
+          (Map<String, Object>) edgeObj.get("guides");
+      final List<Object> values = (List<Object>) guideMap.get("values");
+      final String type = (String) guideMap.get("type");
+
+      final ArrayList<Point2D> valuePoints = new ArrayList<>();
+      for (final Object pointObj : values) {
+        final Map<String, Double> point = (Map<String, Double>) pointObj;
+
+        final Double x = point.get("x");
+        final Double y = point.get("y");
+
+        valuePoints.add(new Point2D.Double(x, y));
+      }
+
+      edge.setGuides(type, valuePoints);
+    }
+
+    return edge;
+  }
+
   public String getId() {
     return getSourceId() + ">>" + getTargetId();
   }
 
   public String getSourceId() {
-    return sourceId;
+    return this.sourceId;
   }
 
   public String getTargetId() {
-    return targetId;
-  }
-
-  public void setError(String error) {
-    this.error = error;
+    return this.targetId;
   }
 
   public String getError() {
     return this.error;
   }
 
+  public void setError(final String error) {
+    this.error = error;
+  }
+
   public boolean hasError() {
     return this.error != null;
   }
 
   public Node getSource() {
-    return source;
+    return this.source;
   }
 
-  public void setSource(Node source) {
+  public void setSource(final Node source) {
     this.source = source;
   }
 
   public Node getTarget() {
-    return target;
+    return this.target;
   }
 
-  public void setTarget(Node target) {
+  public void setTarget(final Node target) {
     this.target = target;
   }
 
   public String getGuideType() {
-    return guideType;
+    return this.guideType;
   }
 
   public List<Point2D> getGuideValues() {
-    return guideValues;
+    return this.guideValues;
   }
 
-  public void setGuides(String type, List<Point2D> values) {
+  public void setGuides(final String type, final List<Point2D> values) {
     this.guideType = type;
     this.guideValues = values;
   }
 
   public Object toObject() {
-    HashMap<String, Object> obj = new HashMap<String, Object>();
+    final HashMap<String, Object> obj = new HashMap<>();
     obj.put("source", getSourceId());
     obj.put("target", getTargetId());
     if (hasError()) {
-      obj.put("error", error);
+      obj.put("error", this.error);
     }
-    if (guideValues != null) {
-      HashMap<String, Object> lineGuidesObj = new HashMap<String, Object>();
-      lineGuidesObj.put("type", guideType);
+    if (this.guideValues != null) {
+      final HashMap<String, Object> lineGuidesObj = new HashMap<>();
+      lineGuidesObj.put("type", this.guideType);
 
-      ArrayList<Object> guides = new ArrayList<Object>();
-      for (Point2D point : this.guideValues) {
-        HashMap<String, Double> pointObj = new HashMap<String, Double>();
+      final ArrayList<Object> guides = new ArrayList<>();
+      for (final Point2D point : this.guideValues) {
+        final HashMap<String, Double> pointObj = new HashMap<>();
         pointObj.put("x", point.getX());
         pointObj.put("y", point.getY());
         guides.add(pointObj);
@@ -123,38 +157,4 @@ public class Edge {
     return obj;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Edge fromObject(Object obj) {
-    HashMap<String, Object> edgeObj = (HashMap<String, Object>) obj;
-
-    String source = (String) edgeObj.get("source");
-    String target = (String) edgeObj.get("target");
-
-    String error = (String) edgeObj.get("error");
-
-    Edge edge = new Edge(source, target);
-    edge.setError(error);
-
-    if (edgeObj.containsKey("guides")) {
-      Map<String, Object> guideMap =
-          (Map<String, Object>) edgeObj.get("guides");
-      List<Object> values = (List<Object>) guideMap.get("values");
-      String type = (String) guideMap.get("type");
-
-      ArrayList<Point2D> valuePoints = new ArrayList<Point2D>();
-      for (Object pointObj : values) {
-        Map<String, Double> point = (Map<String, Double>) pointObj;
-
-        Double x = point.get("x");
-        Double y = point.get("y");
-
-        valuePoints.add(new Point2D.Double(x, y));
-      }
-
-      edge.setGuides(type, valuePoints);
-    }
-
-    return edge;
-  }
-
 }
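
Illustration (not part of this diff): a minimal sketch of how the Edge map serialization above round-trips, using only the constructor and accessors visible in this file (Edge(String, String), setGuides, toObject, fromObject). The job ids and the "spline" guide type are invented example values.

    import azkaban.flow.Edge;
    import java.awt.geom.Point2D;
    import java.util.Arrays;

    public class EdgeRoundTripSketch {
      public static void main(String[] args) {
        Edge edge = new Edge("jobA", "jobB");                 // hypothetical job ids
        edge.setGuides("spline", Arrays.<Point2D>asList(
            new Point2D.Double(10.0, 20.0),
            new Point2D.Double(30.0, 40.0)));

        Object serialized = edge.toObject();      // map with "source", "target" and the guide points
        Edge copy = Edge.fromObject(serialized);  // rebuilt from the same map shape

        System.out.println(copy.getId());                  // jobA>>jobB
        System.out.println(copy.getGuideValues().size());  // 2
      }
    }
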
diff --git a/azkaban-common/src/main/java/azkaban/flow/Flow.java b/azkaban-common/src/main/java/azkaban/flow/Flow.java
index 3e09476..fee2e29 100644
--- a/azkaban-common/src/main/java/azkaban/flow/Flow.java
+++ b/azkaban-common/src/main/java/azkaban/flow/Flow.java
@@ -16,6 +16,7 @@
 
 package azkaban.flow;
 
+import azkaban.executor.mail.DefaultMailCreator;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -24,205 +25,282 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import azkaban.executor.mail.DefaultMailCreator;
-
 public class Flow {
+
   private final String id;
+  private final HashMap<String, Node> nodes = new HashMap<>();
+  private final HashMap<String, Edge> edges = new HashMap<>();
+  private final HashMap<String, Set<Edge>> outEdges =
+      new HashMap<>();
+  private final HashMap<String, Set<Edge>> inEdges = new HashMap<>();
+  private final HashMap<String, FlowProps> flowProps =
+      new HashMap<>();
   private int projectId;
   private ArrayList<Node> startNodes = null;
   private ArrayList<Node> endNodes = null;
   private int numLevels = -1;
-
-  private HashMap<String, Node> nodes = new HashMap<String, Node>();
-
-  private HashMap<String, Edge> edges = new HashMap<String, Edge>();
-  private HashMap<String, Set<Edge>> outEdges =
-      new HashMap<String, Set<Edge>>();
-  private HashMap<String, Set<Edge>> inEdges = new HashMap<String, Set<Edge>>();
-  private HashMap<String, FlowProps> flowProps =
-      new HashMap<String, FlowProps>();
-
-  private List<String> failureEmail = new ArrayList<String>();
-  private List<String> successEmail = new ArrayList<String>();
+  private List<String> failureEmail = new ArrayList<>();
+  private List<String> successEmail = new ArrayList<>();
   private String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR;
   private ArrayList<String> errors;
   private int version = -1;
-  private Map<String, Object> metadata = new HashMap<String, Object>();
+  private Map<String, Object> metadata = new HashMap<>();
 
   private boolean isLayedOut = false;
 
-  public Flow(String id) {
+  public Flow(final String id) {
     this.id = id;
   }
 
-  public void setVersion(int version) {
-    this.version = version;
+  public static Flow flowFromObject(final Object object) {
+    final Map<String, Object> flowObject = (Map<String, Object>) object;
+
+    final String id = (String) flowObject.get("id");
+    final Boolean layedout = (Boolean) flowObject.get("layedout");
+    final Flow flow = new Flow(id);
+    if (layedout != null) {
+      flow.setLayedOut(layedout);
+    }
+    final int projId = (Integer) flowObject.get("project.id");
+    flow.setProjectId(projId);
+
+    final int version = (Integer) flowObject.get("version");
+    flow.setVersion(version);
+
+    // Loading projects
+    final List<Object> propertiesList = (List<Object>) flowObject.get("props");
+    final Map<String, FlowProps> properties =
+        loadPropertiesFromObject(propertiesList);
+    flow.addAllFlowProperties(properties.values());
+
+    // Loading nodes
+    final List<Object> nodeList = (List<Object>) flowObject.get("nodes");
+    final Map<String, Node> nodes = loadNodesFromObjects(nodeList);
+    flow.addAllNodes(nodes.values());
+
+    // Loading edges
+    final List<Object> edgeList = (List<Object>) flowObject.get("edges");
+    final List<Edge> edges = loadEdgeFromObjects(edgeList, nodes);
+    flow.addAllEdges(edges);
+
+    final Map<String, Object> metadata =
+        (Map<String, Object>) flowObject.get("metadata");
+
+    if (metadata != null) {
+      flow.setMetadata(metadata);
+    }
+
+    flow.failureEmail = (List<String>) flowObject.get("failure.email");
+    flow.successEmail = (List<String>) flowObject.get("success.email");
+    if (flowObject.containsKey("mailCreator")) {
+      flow.mailCreator = flowObject.get("mailCreator").toString();
+    }
+    return flow;
+  }
+
+  private static Map<String, Node> loadNodesFromObjects(final List<Object> nodeList) {
+    final Map<String, Node> nodeMap = new HashMap<>();
+
+    for (final Object obj : nodeList) {
+      final Node node = Node.fromObject(obj);
+      nodeMap.put(node.getId(), node);
+    }
+
+    return nodeMap;
+  }
+
+  private static List<Edge> loadEdgeFromObjects(final List<Object> edgeList,
+      final Map<String, Node> nodes) {
+    final List<Edge> edgeResult = new ArrayList<>();
+
+    for (final Object obj : edgeList) {
+      final Edge edge = Edge.fromObject(obj);
+      edgeResult.add(edge);
+    }
+
+    return edgeResult;
+  }
+
+  private static Map<String, FlowProps> loadPropertiesFromObject(
+      final List<Object> propertyObjectList) {
+    final Map<String, FlowProps> properties = new HashMap<>();
+
+    for (final Object propObj : propertyObjectList) {
+      final FlowProps prop = FlowProps.fromObject(propObj);
+      properties.put(prop.getSource(), prop);
+    }
+
+    return properties;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
+  }
+
+  public void setVersion(final int version) {
+    this.version = version;
   }
 
   public void initialize() {
-    if (startNodes == null) {
-      startNodes = new ArrayList<Node>();
-      endNodes = new ArrayList<Node>();
-      for (Node node : nodes.values()) {
+    if (this.startNodes == null) {
+      this.startNodes = new ArrayList<>();
+      this.endNodes = new ArrayList<>();
+      for (final Node node : this.nodes.values()) {
         // If it doesn't have any incoming edges, it's a start node
-        if (!inEdges.containsKey(node.getId())) {
-          startNodes.add(node);
+        if (!this.inEdges.containsKey(node.getId())) {
+          this.startNodes.add(node);
         }
 
         // If it doesn't contain any outgoing edges, it's an end node.
-        if (!outEdges.containsKey(node.getId())) {
-          endNodes.add(node);
+        if (!this.outEdges.containsKey(node.getId())) {
+          this.endNodes.add(node);
         }
       }
 
-      for (Node node : startNodes) {
+      for (final Node node : this.startNodes) {
         node.setLevel(0);
-        numLevels = 0;
+        this.numLevels = 0;
         recursiveSetLevels(node);
       }
     }
   }
 
-  private void recursiveSetLevels(Node node) {
-    Set<Edge> edges = outEdges.get(node.getId());
+  private void recursiveSetLevels(final Node node) {
+    final Set<Edge> edges = this.outEdges.get(node.getId());
     if (edges != null) {
-      for (Edge edge : edges) {
-        Node nextNode = nodes.get(edge.getTargetId());
+      for (final Edge edge : edges) {
+        final Node nextNode = this.nodes.get(edge.getTargetId());
         edge.setSource(node);
         edge.setTarget(nextNode);
 
         // We pick whichever is higher to get the max distance from root.
-        int level = Math.max(node.getLevel() + 1, nextNode.getLevel());
+        final int level = Math.max(node.getLevel() + 1, nextNode.getLevel());
         nextNode.setLevel(level);
-        numLevels = Math.max(level, numLevels);
+        this.numLevels = Math.max(level, this.numLevels);
         recursiveSetLevels(nextNode);
       }
     }
   }
 
-  public Node getNode(String nodeId) {
-    return nodes.get(nodeId);
+  public Node getNode(final String nodeId) {
+    return this.nodes.get(nodeId);
   }
 
   public List<String> getSuccessEmails() {
-    return successEmail;
+    return this.successEmail;
   }
 
   public String getMailCreator() {
-    return mailCreator;
+    return this.mailCreator;
   }
 
-  public List<String> getFailureEmails() {
-    return failureEmail;
+  public void setMailCreator(final String mailCreator) {
+    this.mailCreator = mailCreator;
   }
 
-  public void setMailCreator(String mailCreator) {
-    this.mailCreator = mailCreator;
+  public List<String> getFailureEmails() {
+    return this.failureEmail;
   }
 
-  public void addSuccessEmails(Collection<String> emails) {
-    successEmail.addAll(emails);
+  public void addSuccessEmails(final Collection<String> emails) {
+    this.successEmail.addAll(emails);
   }
 
-  public void addFailureEmails(Collection<String> emails) {
-    failureEmail.addAll(emails);
+  public void addFailureEmails(final Collection<String> emails) {
+    this.failureEmail.addAll(emails);
   }
 
   public int getNumLevels() {
-    return numLevels;
+    return this.numLevels;
   }
 
   public List<Node> getStartNodes() {
-    return startNodes;
+    return this.startNodes;
   }
 
   public List<Node> getEndNodes() {
-    return endNodes;
+    return this.endNodes;
   }
 
-  public Set<Edge> getInEdges(String id) {
-    return inEdges.get(id);
+  public Set<Edge> getInEdges(final String id) {
+    return this.inEdges.get(id);
   }
 
-  public Set<Edge> getOutEdges(String id) {
-    return outEdges.get(id);
+  public Set<Edge> getOutEdges(final String id) {
+    return this.outEdges.get(id);
   }
 
-  public void addAllNodes(Collection<Node> nodes) {
-    for (Node node : nodes) {
+  public void addAllNodes(final Collection<Node> nodes) {
+    for (final Node node : nodes) {
       addNode(node);
     }
   }
 
-  public void addNode(Node node) {
-    nodes.put(node.getId(), node);
+  public void addNode(final Node node) {
+    this.nodes.put(node.getId(), node);
   }
 
-  public void addAllFlowProperties(Collection<FlowProps> props) {
-    for (FlowProps prop : props) {
-      flowProps.put(prop.getSource(), prop);
+  public void addAllFlowProperties(final Collection<FlowProps> props) {
+    for (final FlowProps prop : props) {
+      this.flowProps.put(prop.getSource(), prop);
     }
   }
 
   public String getId() {
-    return id;
+    return this.id;
   }
 
-  public void addError(String error) {
-    if (errors == null) {
-      errors = new ArrayList<String>();
+  public void addError(final String error) {
+    if (this.errors == null) {
+      this.errors = new ArrayList<>();
     }
 
-    errors.add(error);
+    this.errors.add(error);
   }
 
   public List<String> getErrors() {
-    return errors;
+    return this.errors;
   }
 
   public boolean hasErrors() {
-    return errors != null && !errors.isEmpty();
+    return this.errors != null && !this.errors.isEmpty();
   }
 
   public Collection<Node> getNodes() {
-    return nodes.values();
+    return this.nodes.values();
   }
 
   public Collection<Edge> getEdges() {
-    return edges.values();
+    return this.edges.values();
   }
 
-  public void addAllEdges(Collection<Edge> edges) {
-    for (Edge edge : edges) {
+  public void addAllEdges(final Collection<Edge> edges) {
+    for (final Edge edge : edges) {
       addEdge(edge);
     }
   }
 
-  public void addEdge(Edge edge) {
-    String source = edge.getSourceId();
-    String target = edge.getTargetId();
+  public void addEdge(final Edge edge) {
+    final String source = edge.getSourceId();
+    final String target = edge.getTargetId();
 
     if (edge.hasError()) {
       addError("Error on " + edge.getId() + ". " + edge.getError());
     }
 
-    Set<Edge> sourceSet = getEdgeSet(outEdges, source);
+    final Set<Edge> sourceSet = getEdgeSet(this.outEdges, source);
     sourceSet.add(edge);
 
-    Set<Edge> targetSet = getEdgeSet(inEdges, target);
+    final Set<Edge> targetSet = getEdgeSet(this.inEdges, target);
     targetSet.add(edge);
 
-    edges.put(edge.getId(), edge);
+    this.edges.put(edge.getId(), edge);
   }
 
-  private Set<Edge> getEdgeSet(HashMap<String, Set<Edge>> map, String id) {
+  private Set<Edge> getEdgeSet(final HashMap<String, Set<Edge>> map, final String id) {
     Set<Edge> edges = map.get(id);
     if (edges == null) {
-      edges = new HashSet<Edge>();
+      edges = new HashSet<>();
       map.put(id, edges);
     }
 
@@ -230,33 +308,33 @@ public class Flow {
   }
 
   public Map<String, Object> toObject() {
-    HashMap<String, Object> flowObj = new HashMap<String, Object>();
+    final HashMap<String, Object> flowObj = new HashMap<>();
     flowObj.put("type", "flow");
     flowObj.put("id", getId());
-    flowObj.put("project.id", projectId);
-    flowObj.put("version", version);
+    flowObj.put("project.id", this.projectId);
+    flowObj.put("version", this.version);
     flowObj.put("props", objectizeProperties());
     flowObj.put("nodes", objectizeNodes());
     flowObj.put("edges", objectizeEdges());
-    flowObj.put("failure.email", failureEmail);
-    flowObj.put("success.email", successEmail);
-    flowObj.put("mailCreator", mailCreator);
-    flowObj.put("layedout", isLayedOut);
-    if (errors != null) {
-      flowObj.put("errors", errors);
+    flowObj.put("failure.email", this.failureEmail);
+    flowObj.put("success.email", this.successEmail);
+    flowObj.put("mailCreator", this.mailCreator);
+    flowObj.put("layedout", this.isLayedOut);
+    if (this.errors != null) {
+      flowObj.put("errors", this.errors);
     }
 
-    if (metadata != null) {
-      flowObj.put("metadata", metadata);
+    if (this.metadata != null) {
+      flowObj.put("metadata", this.metadata);
     }
 
     return flowObj;
   }
 
   private List<Object> objectizeProperties() {
-    ArrayList<Object> result = new ArrayList<Object>();
-    for (FlowProps props : flowProps.values()) {
-      Object objProps = props.toObject();
+    final ArrayList<Object> result = new ArrayList<>();
+    for (final FlowProps props : this.flowProps.values()) {
+      final Object objProps = props.toObject();
       result.add(objProps);
     }
 
@@ -264,9 +342,9 @@ public class Flow {
   }
 
   private List<Object> objectizeNodes() {
-    ArrayList<Object> result = new ArrayList<Object>();
-    for (Node node : getNodes()) {
-      Object nodeObj = node.toObject();
+    final ArrayList<Object> result = new ArrayList<>();
+    for (final Node node : getNodes()) {
+      final Object nodeObj = node.toObject();
       result.add(nodeObj);
     }
 
@@ -274,141 +352,59 @@ public class Flow {
   }
 
   private List<Object> objectizeEdges() {
-    ArrayList<Object> result = new ArrayList<Object>();
-    for (Edge edge : getEdges()) {
-      Object edgeObj = edge.toObject();
+    final ArrayList<Object> result = new ArrayList<>();
+    for (final Edge edge : getEdges()) {
+      final Object edgeObj = edge.toObject();
       result.add(edgeObj);
     }
 
     return result;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Flow flowFromObject(Object object) {
-    Map<String, Object> flowObject = (Map<String, Object>) object;
-
-    String id = (String) flowObject.get("id");
-    Boolean layedout = (Boolean) flowObject.get("layedout");
-    Flow flow = new Flow(id);
-    if (layedout != null) {
-      flow.setLayedOut(layedout);
-    }
-    int projId = (Integer) flowObject.get("project.id");
-    flow.setProjectId(projId);
-
-    int version = (Integer) flowObject.get("version");
-    flow.setVersion(version);
-
-    // Loading projects
-    List<Object> propertiesList = (List<Object>) flowObject.get("props");
-    Map<String, FlowProps> properties =
-        loadPropertiesFromObject(propertiesList);
-    flow.addAllFlowProperties(properties.values());
-
-    // Loading nodes
-    List<Object> nodeList = (List<Object>) flowObject.get("nodes");
-    Map<String, Node> nodes = loadNodesFromObjects(nodeList);
-    flow.addAllNodes(nodes.values());
-
-    // Loading edges
-    List<Object> edgeList = (List<Object>) flowObject.get("edges");
-    List<Edge> edges = loadEdgeFromObjects(edgeList, nodes);
-    flow.addAllEdges(edges);
-
-    Map<String, Object> metadata =
-        (Map<String, Object>) flowObject.get("metadata");
-
-    if (metadata != null) {
-      flow.setMetadata(metadata);
-    }
-
-    flow.failureEmail = (List<String>) flowObject.get("failure.email");
-    flow.successEmail = (List<String>) flowObject.get("success.email");
-    if (flowObject.containsKey("mailCreator")) {
-      flow.mailCreator = flowObject.get("mailCreator").toString();
-    }
-    return flow;
-  }
-
-  private static Map<String, Node> loadNodesFromObjects(List<Object> nodeList) {
-    Map<String, Node> nodeMap = new HashMap<String, Node>();
-
-    for (Object obj : nodeList) {
-      Node node = Node.fromObject(obj);
-      nodeMap.put(node.getId(), node);
-    }
-
-    return nodeMap;
-  }
-
-  private static List<Edge> loadEdgeFromObjects(List<Object> edgeList,
-      Map<String, Node> nodes) {
-    List<Edge> edgeResult = new ArrayList<Edge>();
-
-    for (Object obj : edgeList) {
-      Edge edge = Edge.fromObject(obj);
-      edgeResult.add(edge);
-    }
-
-    return edgeResult;
-  }
-
-  private static Map<String, FlowProps> loadPropertiesFromObject(
-      List<Object> propertyObjectList) {
-    Map<String, FlowProps> properties = new HashMap<String, FlowProps>();
-
-    for (Object propObj : propertyObjectList) {
-      FlowProps prop = FlowProps.fromObject(propObj);
-      properties.put(prop.getSource(), prop);
-    }
-
-    return properties;
+  public boolean isLayedOut() {
+    return this.isLayedOut;
   }
 
-  public boolean isLayedOut() {
-    return isLayedOut;
+  public void setLayedOut(final boolean layedOut) {
+    this.isLayedOut = layedOut;
   }
 
   public Map<String, Object> getMetadata() {
-    if (metadata == null) {
-      metadata = new HashMap<String, Object>();
+    if (this.metadata == null) {
+      this.metadata = new HashMap<>();
     }
-    return metadata;
+    return this.metadata;
   }
 
-  public void setMetadata(Map<String, Object> metadata) {
+  public void setMetadata(final Map<String, Object> metadata) {
     this.metadata = metadata;
   }
 
-  public void setLayedOut(boolean layedOut) {
-    this.isLayedOut = layedOut;
-  }
-
   public Map<String, Node> getNodeMap() {
-    return nodes;
+    return this.nodes;
   }
 
   public Map<String, Set<Edge>> getOutEdgeMap() {
-    return outEdges;
+    return this.outEdges;
   }
 
   public Map<String, Set<Edge>> getInEdgeMap() {
-    return inEdges;
+    return this.inEdges;
   }
 
-  public FlowProps getFlowProps(String propSource) {
-    return flowProps.get(propSource);
+  public FlowProps getFlowProps(final String propSource) {
+    return this.flowProps.get(propSource);
   }
 
   public Map<String, FlowProps> getAllFlowProps() {
-    return flowProps;
+    return this.flowProps;
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
-  public void setProjectId(int projectId) {
+  public void setProjectId(final int projectId) {
     this.projectId = projectId;
   }
 }
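
Illustration (not part of this diff): wiring a two-node Flow by hand so that initialize() can derive start/end nodes and levels, as in the logic above; the flow and node names are invented.

    import azkaban.flow.Edge;
    import azkaban.flow.Flow;
    import azkaban.flow.Node;

    public class FlowInitSketch {
      public static void main(String[] args) {
        Flow flow = new Flow("demo_flow");
        flow.addNode(new Node("extract"));
        flow.addNode(new Node("load"));
        flow.addEdge(new Edge("extract", "load"));  // extract -> load

        flow.initialize();  // resolves start/end nodes and per-node levels

        System.out.println(flow.getStartNodes().get(0).getId());  // extract
        System.out.println(flow.getEndNodes().get(0).getId());    // load
        System.out.println(flow.getNumLevels());                  // 1
      }
    }
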
diff --git a/azkaban-common/src/main/java/azkaban/flow/FlowProps.java b/azkaban-common/src/main/java/azkaban/flow/FlowProps.java
index 437aff0..01bd4ad 100644
--- a/azkaban-common/src/main/java/azkaban/flow/FlowProps.java
+++ b/azkaban-common/src/main/java/azkaban/flow/FlowProps.java
@@ -16,30 +16,39 @@
 
 package azkaban.flow;
 
+import azkaban.utils.Props;
 import java.util.HashMap;
 import java.util.Map;
 
-import azkaban.utils.Props;
-
 public class FlowProps {
+
   private String parentSource;
   private String propSource;
   private Props props = null;
 
-  public FlowProps(String parentSource, String propSource) {
+  public FlowProps(final String parentSource, final String propSource) {
     this.parentSource = parentSource;
     this.propSource = propSource;
   }
 
-  public FlowProps(Props props) {
+  public FlowProps(final Props props) {
     this.setProps(props);
   }
 
+  public static FlowProps fromObject(final Object obj) {
+    final Map<String, Object> flowMap = (Map<String, Object>) obj;
+    final String source = (String) flowMap.get("source");
+    final String parentSource = (String) flowMap.get("inherits");
+
+    final FlowProps flowProps = new FlowProps(parentSource, source);
+    return flowProps;
+  }
+
   public Props getProps() {
-    return props;
+    return this.props;
   }
 
-  public void setProps(Props props) {
+  public void setProps(final Props props) {
     this.props = props;
     this.parentSource =
         props.getParent() == null ? null : props.getParent().getSource();
@@ -47,29 +56,19 @@ public class FlowProps {
   }
 
   public String getSource() {
-    return propSource;
+    return this.propSource;
   }
 
   public String getInheritedSource() {
-    return parentSource;
+    return this.parentSource;
   }
 
   public Object toObject() {
-    HashMap<String, Object> obj = new HashMap<String, Object>();
-    obj.put("source", propSource);
-    if (parentSource != null) {
-      obj.put("inherits", parentSource);
+    final HashMap<String, Object> obj = new HashMap<>();
+    obj.put("source", this.propSource);
+    if (this.parentSource != null) {
+      obj.put("inherits", this.parentSource);
     }
     return obj;
   }
-
-  @SuppressWarnings("unchecked")
-  public static FlowProps fromObject(Object obj) {
-    Map<String, Object> flowMap = (Map<String, Object>) obj;
-    String source = (String) flowMap.get("source");
-    String parentSource = (String) flowMap.get("inherits");
-
-    FlowProps flowProps = new FlowProps(parentSource, source);
-    return flowProps;
-  }
 }
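
Illustration (not part of this diff): the same round-trip idea for FlowProps. toObject() only keeps the source and inherits entries, so fromObject() restores the source/parent pair but not the Props payload. The file names are invented.

    import azkaban.flow.FlowProps;

    public class FlowPropsSketch {
      public static void main(String[] args) {
        FlowProps original = new FlowProps("common.properties", "job.properties");

        Object obj = original.toObject();            // {source=job.properties, inherits=common.properties}
        FlowProps copy = FlowProps.fromObject(obj);

        System.out.println(copy.getSource());           // job.properties
        System.out.println(copy.getInheritedSource());  // common.properties
      }
    }
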
diff --git a/azkaban-common/src/main/java/azkaban/flow/Node.java b/azkaban-common/src/main/java/azkaban/flow/Node.java
index 3728666..7ea700a 100644
--- a/azkaban-common/src/main/java/azkaban/flow/Node.java
+++ b/azkaban-common/src/main/java/azkaban/flow/Node.java
@@ -16,13 +16,13 @@
 
 package azkaban.flow;
 
+import azkaban.utils.Utils;
 import java.awt.geom.Point2D;
 import java.util.HashMap;
 import java.util.Map;
 
-import azkaban.utils.Utils;
-
 public class Node {
+
   private final String id;
   private String jobSource;
   private String propsSource;
@@ -34,149 +34,146 @@ public class Node {
 
   private String embeddedFlowId;
 
-  public Node(String id) {
+  public Node(final String id) {
     this.id = id;
   }
 
   /**
    * Clones nodes
-   *
-   * @param node
    */
-  public Node(Node clone) {
+  public Node(final Node clone) {
     this.id = clone.id;
     this.propsSource = clone.propsSource;
     this.jobSource = clone.jobSource;
   }
 
+  public static Node fromObject(final Object obj) {
+    final Map<String, Object> mapObj = (Map<String, Object>) obj;
+    final String id = (String) mapObj.get("id");
+
+    final Node node = new Node(id);
+    final String jobSource = (String) mapObj.get("jobSource");
+    final String propSource = (String) mapObj.get("propSource");
+    final String jobType = (String) mapObj.get("jobType");
+
+    final String embeddedFlowId = (String) mapObj.get("embeddedFlowId");
+
+    node.setJobSource(jobSource);
+    node.setPropsSource(propSource);
+    node.setType(jobType);
+    node.setEmbeddedFlowId(embeddedFlowId);
+
+    final Integer expectedRuntime = (Integer) mapObj.get("expectedRuntime");
+    if (expectedRuntime != null) {
+      node.setExpectedRuntimeSec(expectedRuntime);
+    }
+
+    final Map<String, Object> layoutInfo = (Map<String, Object>) mapObj.get("layout");
+    if (layoutInfo != null) {
+      Double x = null;
+      Double y = null;
+      Integer level = null;
+
+      try {
+        x = Utils.convertToDouble(layoutInfo.get("x"));
+        y = Utils.convertToDouble(layoutInfo.get("y"));
+        level = (Integer) layoutInfo.get("level");
+      } catch (final ClassCastException e) {
+        throw new RuntimeException("Error creating node " + id, e);
+      }
+
+      if (x != null && y != null) {
+        node.setPosition(new Point2D.Double(x, y));
+      }
+      if (level != null) {
+        node.setLevel(level);
+      }
+    }
+
+    return node;
+  }
+
   public String getId() {
-    return id;
+    return this.id;
   }
 
   public String getType() {
-    return type;
+    return this.type;
   }
 
-  public void setType(String type) {
+  public void setType(final String type) {
     this.type = type;
   }
 
   public Point2D getPosition() {
-    return position;
+    return this.position;
   }
 
-  public void setPosition(Point2D position) {
+  public void setPosition(final Point2D position) {
     this.position = position;
   }
 
-  public void setPosition(double x, double y) {
+  public void setPosition(final double x, final double y) {
     this.position = new Point2D.Double(x, y);
   }
 
   public int getLevel() {
-    return level;
+    return this.level;
   }
 
-  public void setLevel(int level) {
+  public void setLevel(final int level) {
     this.level = level;
   }
 
   public String getJobSource() {
-    return jobSource;
+    return this.jobSource;
   }
 
-  public void setJobSource(String jobSource) {
+  public void setJobSource(final String jobSource) {
     this.jobSource = jobSource;
   }
 
   public String getPropsSource() {
-    return propsSource;
+    return this.propsSource;
   }
 
-  public void setPropsSource(String propsSource) {
+  public void setPropsSource(final String propsSource) {
     this.propsSource = propsSource;
   }
 
-  public void setExpectedRuntimeSec(int runtimeSec) {
-    expectedRunTimeSec = runtimeSec;
-  }
-
   public int getExpectedRuntimeSec() {
-    return expectedRunTimeSec;
+    return this.expectedRunTimeSec;
   }
 
-  public void setEmbeddedFlowId(String flowId) {
-    embeddedFlowId = flowId;
+  public void setExpectedRuntimeSec(final int runtimeSec) {
+    this.expectedRunTimeSec = runtimeSec;
   }
 
   public String getEmbeddedFlowId() {
-    return embeddedFlowId;
+    return this.embeddedFlowId;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Node fromObject(Object obj) {
-    Map<String, Object> mapObj = (Map<String, Object>) obj;
-    String id = (String) mapObj.get("id");
-
-    Node node = new Node(id);
-    String jobSource = (String) mapObj.get("jobSource");
-    String propSource = (String) mapObj.get("propSource");
-    String jobType = (String) mapObj.get("jobType");
-
-    String embeddedFlowId = (String) mapObj.get("embeddedFlowId");
-
-    node.setJobSource(jobSource);
-    node.setPropsSource(propSource);
-    node.setType(jobType);
-    node.setEmbeddedFlowId(embeddedFlowId);
-
-    Integer expectedRuntime = (Integer) mapObj.get("expectedRuntime");
-    if (expectedRuntime != null) {
-      node.setExpectedRuntimeSec(expectedRuntime);
-    }
-
-    Map<String, Object> layoutInfo = (Map<String, Object>) mapObj.get("layout");
-    if (layoutInfo != null) {
-      Double x = null;
-      Double y = null;
-      Integer level = null;
-
-      try {
-        x = Utils.convertToDouble(layoutInfo.get("x"));
-        y = Utils.convertToDouble(layoutInfo.get("y"));
-        level = (Integer) layoutInfo.get("level");
-      } catch (ClassCastException e) {
-        throw new RuntimeException("Error creating node " + id, e);
-      }
-
-      if (x != null && y != null) {
-        node.setPosition(new Point2D.Double(x, y));
-      }
-      if (level != null) {
-        node.setLevel(level);
-      }
-    }
-
-    return node;
+  public void setEmbeddedFlowId(final String flowId) {
+    this.embeddedFlowId = flowId;
   }
 
   public Object toObject() {
-    HashMap<String, Object> objMap = new HashMap<String, Object>();
-    objMap.put("id", id);
-    objMap.put("jobSource", jobSource);
-    objMap.put("propSource", propsSource);
-    objMap.put("jobType", type);
-    if (embeddedFlowId != null) {
-      objMap.put("embeddedFlowId", embeddedFlowId);
+    final HashMap<String, Object> objMap = new HashMap<>();
+    objMap.put("id", this.id);
+    objMap.put("jobSource", this.jobSource);
+    objMap.put("propSource", this.propsSource);
+    objMap.put("jobType", this.type);
+    if (this.embeddedFlowId != null) {
+      objMap.put("embeddedFlowId", this.embeddedFlowId);
     }
-    objMap.put("expectedRuntime", expectedRunTimeSec);
+    objMap.put("expectedRuntime", this.expectedRunTimeSec);
 
-    HashMap<String, Object> layoutInfo = new HashMap<String, Object>();
-    if (position != null) {
-      layoutInfo.put("x", position.getX());
-      layoutInfo.put("y", position.getY());
+    final HashMap<String, Object> layoutInfo = new HashMap<>();
+    if (this.position != null) {
+      layoutInfo.put("x", this.position.getX());
+      layoutInfo.put("y", this.position.getY());
     }
-    layoutInfo.put("level", level);
+    layoutInfo.put("level", this.level);
     objMap.put("layout", layoutInfo);
 
     return objMap;
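
Illustration (not part of this diff): the map shape Node.fromObject() expects, including the optional layout block handled above. All values are invented, and the sketch assumes Utils.convertToDouble accepts the Double values used here.

    import azkaban.flow.Node;
    import java.util.HashMap;
    import java.util.Map;

    public class NodeFromObjectSketch {
      public static void main(String[] args) {
        Map<String, Object> layout = new HashMap<>();
        layout.put("x", 120.5);
        layout.put("y", 64.0);
        layout.put("level", 2);

        Map<String, Object> raw = new HashMap<>();
        raw.put("id", "transform");                  // invented node id
        raw.put("jobSource", "transform.job");
        raw.put("propSource", "common.properties");
        raw.put("jobType", "command");
        raw.put("layout", layout);

        Node node = Node.fromObject(raw);
        System.out.println(node.getType());             // command
        System.out.println(node.getLevel());            // 2
        System.out.println(node.getPosition().getX());  // 120.5
      }
    }
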
diff --git a/azkaban-common/src/main/java/azkaban/flow/SpecialJobTypes.java b/azkaban-common/src/main/java/azkaban/flow/SpecialJobTypes.java
index 083c2f2..88a7f49 100644
--- a/azkaban-common/src/main/java/azkaban/flow/SpecialJobTypes.java
+++ b/azkaban-common/src/main/java/azkaban/flow/SpecialJobTypes.java
@@ -17,6 +17,7 @@
 package azkaban.flow;
 
 public class SpecialJobTypes {
+
   public static final String BRANCH_START_TYPE = "branch.start";
   public static final String BRANCH_END_TYPE = "branch.end";
 
diff --git a/azkaban-common/src/main/java/azkaban/jmx/DisplayName.java b/azkaban-common/src/main/java/azkaban/jmx/DisplayName.java
index 9498555..7f7d1b2 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/DisplayName.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/DisplayName.java
@@ -21,7 +21,6 @@ import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
-
 import javax.management.DescriptorKey;
 
 /**
@@ -32,6 +31,7 @@ import javax.management.DescriptorKey;
 @Target(ElementType.METHOD)
 @Retention(RetentionPolicy.RUNTIME)
 public @interface DisplayName {
+
   @DescriptorKey("displayName")
   String value();
 }
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManager.java b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManager.java
index d459ae9..27c6029 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManager.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManager.java
@@ -16,15 +16,15 @@
 
 package azkaban.jmx;
 
+import azkaban.executor.ExecutorManager;
 import java.util.ArrayList;
 import java.util.List;
 
-import azkaban.executor.ExecutorManager;
-
 public class JmxExecutorManager implements JmxExecutorManagerMBean {
-  private ExecutorManager manager;
 
-  public JmxExecutorManager(ExecutorManager manager) {
+  private final ExecutorManager manager;
+
+  public JmxExecutorManager(final ExecutorManager manager) {
     this.manager = manager;
   }
 
@@ -35,62 +35,62 @@ public class JmxExecutorManager implements JmxExecutorManagerMBean {
 
   @Override
   public String getExecutorThreadState() {
-    return manager.getExecutorManagerThreadState().toString();
+    return this.manager.getExecutorManagerThreadState().toString();
   }
 
   @Override
   public String getExecutorThreadStage() {
-    return manager.getExecutorThreadStage();
+    return this.manager.getExecutorThreadStage();
   }
 
   @Override
   public boolean isThreadActive() {
-    return manager.isExecutorManagerThreadActive();
+    return this.manager.isExecutorManagerThreadActive();
   }
 
   @Override
   public Long getLastThreadCheckTime() {
-    return manager.getLastExecutorManagerThreadCheckTime();
+    return this.manager.getLastExecutorManagerThreadCheckTime();
   }
 
   @Override
   public List<String> getPrimaryExecutorHostPorts() {
-    return new ArrayList<String>(manager.getPrimaryServerHosts());
+    return new ArrayList<>(this.manager.getPrimaryServerHosts());
   }
 
   @Override
   public String getRunningFlows() {
-    return manager.getRunningFlowIds();
+    return this.manager.getRunningFlowIds();
   }
 
   @Override
   public boolean isQueueProcessorActive() {
-    return manager.isQueueProcessorThreadActive();
+    return this.manager.isQueueProcessorThreadActive();
   }
 
   @Override
   public String getQueuedFlows() {
-    return manager.getQueuedFlowIds();
+    return this.manager.getQueuedFlowIds();
   }
 
   @Override
   public String getQueueProcessorThreadState() {
-    return manager.getQueueProcessorThreadState().toString();
+    return this.manager.getQueueProcessorThreadState().toString();
   }
 
   @Override
   public List<String> getAvailableExecutorComparatorNames() {
-    return new ArrayList<String>(manager.getAvailableExecutorComparatorNames());
+    return new ArrayList<>(this.manager.getAvailableExecutorComparatorNames());
   }
 
   @Override
   public List<String> getAvailableExecutorFilterNames() {
-    return new ArrayList<String>(manager.getAvailableExecutorFilterNames());
+    return new ArrayList<>(this.manager.getAvailableExecutorFilterNames());
   }
 
   @Override
   public long getLastSuccessfulExecutorInfoRefresh() {
-    return manager.getLastSuccessfulExecutorInfoRefresh();
+    return this.manager.getLastSuccessfulExecutorInfoRefresh();
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
index 4637970..00e0677 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapter.java
@@ -16,17 +16,17 @@
 
 package azkaban.jmx;
 
+import azkaban.executor.ExecutorManagerAdapter;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import azkaban.executor.ExecutorManagerAdapter;
-
 public class JmxExecutorManagerAdapter implements
     JmxExecutorManagerAdapterMBean {
-  private ExecutorManagerAdapter manager;
 
-  public JmxExecutorManagerAdapter(ExecutorManagerAdapter manager) {
+  private final ExecutorManagerAdapter manager;
+
+  public JmxExecutorManagerAdapter(final ExecutorManagerAdapter manager) {
     this.manager = manager;
   }
 
@@ -34,7 +34,7 @@ public class JmxExecutorManagerAdapter implements
   public int getNumRunningFlows() {
     try {
       return this.manager.getRunningFlows().size();
-    } catch (IOException e) {
+    } catch (final IOException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
       return 0;
@@ -43,22 +43,22 @@ public class JmxExecutorManagerAdapter implements
 
   @Override
   public String getExecutorManagerThreadState() {
-    return manager.getExecutorManagerThreadState().toString();
+    return this.manager.getExecutorManagerThreadState().toString();
   }
 
   @Override
   public boolean isExecutorManagerThreadActive() {
-    return manager.isExecutorManagerThreadActive();
+    return this.manager.isExecutorManagerThreadActive();
   }
 
   @Override
   public Long getLastExecutorManagerThreadCheckTime() {
-    return manager.getLastExecutorManagerThreadCheckTime();
+    return this.manager.getLastExecutorManagerThreadCheckTime();
   }
 
   @Override
   public List<String> getPrimaryExecutorHostPorts() {
-    return new ArrayList<String>(manager.getPrimaryServerHosts());
+    return new ArrayList<>(this.manager.getPrimaryServerHosts());
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
index 686f604..c513a7d 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerAdapterMBean.java
@@ -19,6 +19,7 @@ package azkaban.jmx;
 import java.util.List;
 
 public interface JmxExecutorManagerAdapterMBean {
+
   @DisplayName("OPERATION: getNumRunningFlows")
   public int getNumRunningFlows();
 
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
index 69e401c..0218d51 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxExecutorManagerMBean.java
@@ -19,6 +19,7 @@ package azkaban.jmx;
 import java.util.List;
 
 public interface JmxExecutorManagerMBean {
+
   @DisplayName("OPERATION: getNumRunningFlows")
   public int getNumRunningFlows();
 
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServer.java b/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServer.java
index fd520f2..288c427 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServer.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServer.java
@@ -20,10 +20,11 @@ import org.mortbay.jetty.Connector;
 import org.mortbay.jetty.Server;
 
 public class JmxJettyServer implements JmxJettyServerMBean {
-  private Server server;
-  private Connector connector;
 
-  public JmxJettyServer(Server server) {
+  private final Server server;
+  private final Connector connector;
+
+  public JmxJettyServer(final Server server) {
     this.server = server;
     this.connector = server.getConnectors()[0];
   }
@@ -55,91 +56,91 @@ public class JmxJettyServer implements JmxJettyServerMBean {
 
   @Override
   public String getHost() {
-    return connector.getHost();
+    return this.connector.getHost();
   }
 
   @Override
   public int getPort() {
-    return connector.getPort();
+    return this.connector.getPort();
   }
 
   @Override
   public int getConfidentialPort() {
-    return connector.getConfidentialPort();
+    return this.connector.getConfidentialPort();
   }
 
   @Override
   public int getConnections() {
-    return connector.getConnections();
+    return this.connector.getConnections();
   }
 
   @Override
   public int getConnectionsOpen() {
-    return connector.getConnectionsOpen();
+    return this.connector.getConnectionsOpen();
   }
 
   @Override
   public int getConnectionsOpenMax() {
-    return connector.getConnectionsOpenMax();
+    return this.connector.getConnectionsOpenMax();
   }
 
   @Override
   public int getConnectionsOpenMin() {
-    return connector.getConnectionsOpenMin();
+    return this.connector.getConnectionsOpenMin();
   }
 
   @Override
   public long getConnectionsDurationAve() {
-    return connector.getConnectionsDurationAve();
+    return this.connector.getConnectionsDurationAve();
   }
 
   @Override
   public long getConnectionsDurationMax() {
-    return connector.getConnectionsDurationMax();
+    return this.connector.getConnectionsDurationMax();
   }
 
   @Override
   public long getConnectionsDurationMin() {
-    return connector.getConnectionsDurationMin();
+    return this.connector.getConnectionsDurationMin();
   }
 
   @Override
   public long getConnectionsDurationTotal() {
-    return connector.getConnectionsDurationTotal();
+    return this.connector.getConnectionsDurationTotal();
   }
 
   @Override
   public long getConnectionsRequestAve() {
-    return connector.getConnectionsRequestsAve();
+    return this.connector.getConnectionsRequestsAve();
   }
 
   @Override
   public long getConnectionsRequestMax() {
-    return connector.getConnectionsRequestsMax();
+    return this.connector.getConnectionsRequestsMax();
   }
 
   @Override
   public long getConnectionsRequestMin() {
-    return connector.getConnectionsRequestsMin();
+    return this.connector.getConnectionsRequestsMin();
   }
 
   @Override
   public void turnStatsOn() {
-    connector.setStatsOn(true);
+    this.connector.setStatsOn(true);
   }
 
   @Override
   public void turnStatsOff() {
-    connector.setStatsOn(false);
+    this.connector.setStatsOn(false);
   }
 
   @Override
   public void resetStats() {
-    connector.statsReset();
+    this.connector.statsReset();
   }
 
   @Override
   public boolean isStatsOn() {
-    return connector.getStatsOn();
+    return this.connector.getStatsOn();
   }
 }
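
Illustration (not part of this diff): one way the JmxJettyServer wrapper above can be exposed, by registering it with the platform MBean server. The object name and port are invented for the example; Azkaban's own registration path is not shown here.

    import azkaban.jmx.JmxJettyServer;
    import java.lang.management.ManagementFactory;
    import javax.management.ObjectName;
    import org.mortbay.jetty.Server;

    public class JmxJettyServerSketch {
      public static void main(String[] args) throws Exception {
        Server jetty = new Server(8081);            // Jetty 6 server with a default connector
        JmxJettyServer mbean = new JmxJettyServer(jetty);

        ManagementFactory.getPlatformMBeanServer()
            .registerMBean(mbean, new ObjectName("azkaban:name=JettyServerSketch"));
        System.out.println(mbean.getPort());        // 8081
      }
    }
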
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServerMBean.java b/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
index 7862c6c..b03c63e 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxJettyServerMBean.java
@@ -17,6 +17,7 @@
 package azkaban.jmx;
 
 public interface JmxJettyServerMBean {
+
   @DisplayName("OPERATION: isRunning")
   public boolean isRunning();
 
diff --git a/azkaban-common/src/main/java/azkaban/jmx/JmxTriggerManager.java b/azkaban-common/src/main/java/azkaban/jmx/JmxTriggerManager.java
index 9e46645..81b7423 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/JmxTriggerManager.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/JmxTriggerManager.java
@@ -20,25 +20,26 @@ import azkaban.trigger.TriggerManagerAdapter;
 import azkaban.trigger.TriggerManagerAdapter.TriggerJMX;
 
 public class JmxTriggerManager implements JmxTriggerManagerMBean {
-  private TriggerJMX jmxStats;
 
-  public JmxTriggerManager(TriggerManagerAdapter manager) {
+  private final TriggerJMX jmxStats;
+
+  public JmxTriggerManager(final TriggerManagerAdapter manager) {
     this.jmxStats = manager.getJMX();
   }
 
   @Override
   public long getLastRunnerThreadCheckTime() {
-    return jmxStats.getLastRunnerThreadCheckTime();
+    return this.jmxStats.getLastRunnerThreadCheckTime();
   }
 
   @Override
   public boolean isRunnerThreadActive() {
-    return jmxStats.isRunnerThreadActive();
+    return this.jmxStats.isRunnerThreadActive();
   }
 
   @Override
   public String getPrimaryTriggerHostPort() {
-    return jmxStats.getPrimaryServerHost();
+    return this.jmxStats.getPrimaryServerHost();
   }
 
   // @Override
@@ -48,26 +49,26 @@ public class JmxTriggerManager implements JmxTriggerManagerMBean {
 
   @Override
   public int getNumTriggers() {
-    return jmxStats.getNumTriggers();
+    return this.jmxStats.getNumTriggers();
   }
 
   @Override
   public String getTriggerSources() {
-    return jmxStats.getTriggerSources();
+    return this.jmxStats.getTriggerSources();
   }
 
   @Override
   public String getTriggerIds() {
-    return jmxStats.getTriggerIds();
+    return this.jmxStats.getTriggerIds();
   }
 
   @Override
   public long getScannerIdleTime() {
-    return jmxStats.getScannerIdleTime();
+    return this.jmxStats.getScannerIdleTime();
   }
 
   @Override
   public String getScannerThreadStage() {
-    return jmxStats.getScannerThreadStage();
+    return this.jmxStats.getScannerThreadStage();
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/jmx/ParameterName.java b/azkaban-common/src/main/java/azkaban/jmx/ParameterName.java
index 8a4a077..36f75f1 100644
--- a/azkaban-common/src/main/java/azkaban/jmx/ParameterName.java
+++ b/azkaban-common/src/main/java/azkaban/jmx/ParameterName.java
@@ -19,18 +19,19 @@ package azkaban.jmx;
  * ParameterName - This annotation allows supplying
  * a parameter name for a method in the MBean interface.
  */
+
 import java.lang.annotation.Documented;
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
-
 import javax.management.DescriptorKey;
 
 @Documented
 @Target(ElementType.PARAMETER)
 @Retention(RetentionPolicy.RUNTIME)
 public @interface ParameterName {
+
   @DescriptorKey("parameterName")
   String value();
 }
diff --git a/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackConstants.java b/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackConstants.java
index 122640d..0731a64 100644
--- a/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackConstants.java
+++ b/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackConstants.java
@@ -1,6 +1,7 @@
 package azkaban.jobcallback;
 
 public interface JobCallbackConstants {
+
   public static final String STATUS_TOKEN = "status";
   public static final String SEQUENCE_TOKEN = "sequence";
   public static final String HTTP_GET = "GET";
diff --git a/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackValidator.java b/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackValidator.java
index a85d1a3..680db51 100644
--- a/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackValidator.java
+++ b/azkaban-common/src/main/java/azkaban/jobcallback/JobCallbackValidator.java
@@ -10,18 +10,15 @@ import static azkaban.jobcallback.JobCallbackConstants.MAX_POST_BODY_LENGTH_PROP
 import static azkaban.jobcallback.JobCallbackConstants.SEQUENCE_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.STATUS_TOKEN;
 
+import azkaban.utils.Props;
 import java.util.Collection;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-
 /**
  * Responsible for validating the job callback related properties at project
  * upload time
- * 
- * @author hluu
  *
+ * @author hluu
  */
 public class JobCallbackValidator {
 
@@ -30,25 +27,22 @@ public class JobCallbackValidator {
 
   /**
    * Make sure all the job callback related properties are valid
-   * 
-   * @param jobProps
-   * @param error
-   * @return number of valid job callback properties. Mainly for testing
-   *         purpose.
+   *
+   * @return number of valid job callback properties. Mainly for testing purposes.
    */
-  public static int validate(String jobName, Props serverProps, Props jobProps,
-      Collection<String> errors) {
-    int maxNumCallback =
+  public static int validate(final String jobName, final Props serverProps, final Props jobProps,
+      final Collection<String> errors) {
+    final int maxNumCallback =
         serverProps.getInt(
             JobCallbackConstants.MAX_CALLBACK_COUNT_PROPERTY_KEY,
             JobCallbackConstants.DEFAULT_MAX_CALLBACK_COUNT);
 
-    int maxPostBodyLength =
+    final int maxPostBodyLength =
         serverProps.getInt(MAX_POST_BODY_LENGTH_PROPERTY_KEY,
             DEFAULT_POST_BODY_LENGTH);
 
     int totalCallbackCount = 0;
-    for (JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
+    for (final JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
       totalCallbackCount +=
           validateBasedOnStatus(jobProps, errors, jobStatus, maxNumCallback,
               maxPostBodyLength);
@@ -61,29 +55,29 @@ public class JobCallbackValidator {
     return totalCallbackCount;
   }
 
-  private static int validateBasedOnStatus(Props jobProps,
-      Collection<String> errors, JobCallbackStatusEnum jobStatus,
-      int maxNumCallback, int maxPostBodyLength) {
+  private static int validateBasedOnStatus(final Props jobProps,
+      final Collection<String> errors, final JobCallbackStatusEnum jobStatus,
+      final int maxNumCallback, final int maxPostBodyLength) {
 
     int callbackCount = 0;
     // replace property templates with status
-    String jobCallBackUrl =
+    final String jobCallBackUrl =
         JOB_CALLBACK_URL_TEMPLATE.replaceFirst(STATUS_TOKEN, jobStatus.name()
             .toLowerCase());
 
-    String requestMethod =
+    final String requestMethod =
         JOB_CALLBACK_REQUEST_METHOD_TEMPLATE.replaceFirst(STATUS_TOKEN,
             jobStatus.name().toLowerCase());
 
-    String httpBody =
+    final String httpBody =
         JOB_CALLBACK_BODY_TEMPLATE.replaceFirst(STATUS_TOKEN, jobStatus.name()
             .toLowerCase());
 
     for (int i = 0; i <= maxNumCallback; i++) {
       // callback url
-      String callbackUrlKey =
+      final String callbackUrlKey =
           jobCallBackUrl.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
-      String callbackUrlValue = jobProps.get(callbackUrlKey);
+      final String callbackUrlValue = jobProps.get(callbackUrlKey);
 
       // sequence numbers should start at 1; this checks for a sequence
       // number that starts at 0
@@ -97,16 +91,16 @@ public class JobCallbackValidator {
       if (callbackUrlValue == null || callbackUrlValue.length() == 0) {
         break;
       } else {
-        String requestMethodKey =
+        final String requestMethodKey =
             requestMethod.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
 
-        String methodValue = jobProps.getString(requestMethodKey, HTTP_GET);
+        final String methodValue = jobProps.getString(requestMethodKey, HTTP_GET);
 
         if (HTTP_POST.equals(methodValue)) {
           // now try to get the post body
-          String postBodyKey =
+          final String postBodyKey =
               httpBody.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
-          String postBodyValue = jobProps.get(postBodyKey);
+          final String postBodyValue = jobProps.get(postBodyKey);
           if (postBodyValue == null || postBodyValue.length() == 0) {
             errors.add("No POST body was specified for job callback '"
                 + callbackUrlValue + "'");
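
Illustration (not part of this diff): running the validator above against an empty job configuration. With no callback properties defined it should report zero callbacks and leave the error list empty; the job name is invented and both Props objects are plain empty instances.

    import azkaban.jobcallback.JobCallbackValidator;
    import azkaban.utils.Props;
    import java.util.ArrayList;
    import java.util.List;

    public class JobCallbackValidatorSketch {
      public static void main(String[] args) {
        Props serverProps = new Props();   // falls back to the default callback limits
        Props jobProps = new Props();      // no job callback properties at all
        List<String> errors = new ArrayList<>();

        int count = JobCallbackValidator.validate("demoJob", serverProps, jobProps, errors);
        System.out.println(count);             // 0
        System.out.println(errors.isEmpty());  // true
      }
    }
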
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractJob.java
index c28a291..4092fe2 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractJob.java
@@ -16,9 +16,8 @@
 
 package azkaban.jobExecutor;
 
-import org.apache.log4j.Logger;
-
 import azkaban.utils.Props;
+import org.apache.log4j.Logger;
 
 public abstract class AbstractJob implements Job {
 
@@ -32,64 +31,64 @@ public abstract class AbstractJob implements Job {
   private final Logger _log;
   private volatile double _progress;
 
-  protected AbstractJob(String id, Logger log) {
-    _id = id;
-    _log = log;
-    _progress = 0.0;
+  protected AbstractJob(final String id, final Logger log) {
+    this._id = id;
+    this._log = log;
+    this._progress = 0.0;
   }
 
   @Override
   public String getId() {
-    return _id;
+    return this._id;
   }
 
   @Override
   public double getProgress() throws Exception {
-    return _progress;
+    return this._progress;
   }
 
-  public void setProgress(double progress) {
+  public void setProgress(final double progress) {
     this._progress = progress;
   }
 
   @Override
   public void cancel() throws Exception {
-    throw new RuntimeException("Job " + _id + " does not support cancellation!");
+    throw new RuntimeException("Job " + this._id + " does not support cancellation!");
   }
 
   public Logger getLog() {
     return this._log;
   }
 
-  public void debug(String message) {
+  public void debug(final String message) {
     this._log.debug(message);
   }
 
-  public void debug(String message, Throwable t) {
+  public void debug(final String message, final Throwable t) {
     this._log.debug(message, t);
   }
 
-  public void info(String message) {
+  public void info(final String message) {
     this._log.info(message);
   }
 
-  public void info(String message, Throwable t) {
+  public void info(final String message, final Throwable t) {
     this._log.info(message, t);
   }
 
-  public void warn(String message) {
+  public void warn(final String message) {
     this._log.warn(message);
   }
 
-  public void warn(String message, Throwable t) {
+  public void warn(final String message, final Throwable t) {
     this._log.warn(message, t);
   }
 
-  public void error(String message) {
+  public void error(final String message) {
     this._log.error(message);
   }
 
-  public void error(String message, Throwable t) {
+  public void error(final String message, final Throwable t) {
     this._log.error(message, t);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
index 831e49b..d5f2c69 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/AbstractProcessJob.java
@@ -16,6 +16,9 @@
 
 package azkaban.jobExecutor;
 
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
+import azkaban.utils.PropsUtils;
 import java.io.BufferedInputStream;
 import java.io.File;
 import java.io.FileInputStream;
@@ -23,20 +26,15 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Map;
-
 import org.apache.commons.fileupload.util.Streams;
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Props;
-import azkaban.utils.PropsUtils;
-
 /**
  * A revised process-based job
  */
 public abstract class AbstractProcessJob extends AbstractJob {
-  private final Logger log;
+
   public static final String ENV_PREFIX = "env.";
   public static final String ENV_PREFIX_UCASE = "ENV.";
   public static final String WORKING_DIR = "working.dir";
@@ -46,9 +44,8 @@ public abstract class AbstractProcessJob extends AbstractJob {
   private static final String SENSITIVE_JOB_PROP_NAME_SUFFIX = "_X";
   private static final String SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER = "[MASKED]";
   private static final String JOB_DUMP_PROPERTIES_IN_LOG = "job.dump.properties";
-
   protected final String _jobPath;
-
+  private final Logger log;
   protected volatile Props jobProps;
   protected volatile Props sysProps;
 
@@ -56,32 +53,49 @@ public abstract class AbstractProcessJob extends AbstractJob {
 
   private volatile Props generatedProperties;
 
-  protected AbstractProcessJob(String jobid, final Props sysProps,
+  protected AbstractProcessJob(final String jobid, final Props sysProps,
       final Props jobProps, final Logger log) {
     super(jobid, log);
 
     this.jobProps = jobProps;
     this.sysProps = sysProps;
-    _cwd = getWorkingDirectory();
-    _jobPath = _cwd;
+    this._cwd = getWorkingDirectory();
+    this._jobPath = this._cwd;
 
     this.log = log;
   }
 
+  public static File createOutputPropsFile(final String id,
+      final String workingDir) {
+    System.err.println("cwd=" + workingDir);
+
+    final File directory = new File(workingDir);
+    File tempFile = null;
+    try {
+      tempFile = File.createTempFile(id + "_output_", "_tmp", directory);
+    } catch (final IOException e) {
+      System.err.println("Failed to create temp output property file :\n");
+      e.printStackTrace(System.err);
+      throw new RuntimeException("Failed to create temp output property file ",
+          e);
+    }
+    return tempFile;
+  }
+
   public Props getJobProps() {
-    return jobProps;
+    return this.jobProps;
   }
 
   public Props getSysProps() {
-    return sysProps;
+    return this.sysProps;
   }
 
   public String getJobPath() {
-    return _jobPath;
+    return this._jobPath;
   }
 
   protected void resolveProps() {
-    jobProps = PropsUtils.resolveProps(jobProps);
+    this.jobProps = PropsUtils.resolveProps(this.jobProps);
   }
 
   /**
@@ -89,29 +103,29 @@ public abstract class AbstractProcessJob extends AbstractJob {
    */
   protected void logJobProperties() {
     if (this.jobProps != null &&
-        this.jobProps.getBoolean(JOB_DUMP_PROPERTIES_IN_LOG, false)){
+        this.jobProps.getBoolean(JOB_DUMP_PROPERTIES_IN_LOG, false)) {
       try {
-        Map<String,String> flattenedProps = this.jobProps.getFlattened();
+        final Map<String, String> flattenedProps = this.jobProps.getFlattened();
         this.info("******   Job properties   ******");
         this.info(String.format("- Note : value is masked if property name ends with '%s'.",
-            SENSITIVE_JOB_PROP_NAME_SUFFIX ));
-        for(Map.Entry<String, String> entry : flattenedProps.entrySet()){
-          String key = entry.getKey();
-          String value = key.endsWith(SENSITIVE_JOB_PROP_NAME_SUFFIX)?
-                                      SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER :
-                                      entry.getValue();
-          this.info(String.format("%s=%s",key,value));
+            SENSITIVE_JOB_PROP_NAME_SUFFIX));
+        for (final Map.Entry<String, String> entry : flattenedProps.entrySet()) {
+          final String key = entry.getKey();
+          final String value = key.endsWith(SENSITIVE_JOB_PROP_NAME_SUFFIX) ?
+              SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER :
+              entry.getValue();
+          this.info(String.format("%s=%s", key, value));
         }
         this.info("****** End Job properties  ******");
-      } catch (Exception ex){
-        log.error("failed to log job properties ", ex);
+      } catch (final Exception ex) {
+        this.log.error("failed to log job properties ", ex);
       }
     }
   }
 
   @Override
   public Props getJobGeneratedProperties() {
-    return generatedProperties;
+    return this.generatedProperties;
   }
 
   /**
@@ -121,30 +135,30 @@ public abstract class AbstractProcessJob extends AbstractJob {
    */
   public File[] initPropsFiles() {
     // Create a properties file that additionally includes all input generated properties.
-    File[] files = new File[2];
-    files[0] = createFlattenedPropsFile(_cwd);
+    final File[] files = new File[2];
+    files[0] = createFlattenedPropsFile(this._cwd);
 
-    jobProps.put(ENV_PREFIX + JOB_PROP_ENV, files[0].getAbsolutePath());
-    jobProps.put(ENV_PREFIX + JOB_NAME_ENV, getId());
+    this.jobProps.put(ENV_PREFIX + JOB_PROP_ENV, files[0].getAbsolutePath());
+    this.jobProps.put(ENV_PREFIX + JOB_NAME_ENV, getId());
 
-    files[1] = createOutputPropsFile(getId(), _cwd);
-    jobProps.put(ENV_PREFIX + JOB_OUTPUT_PROP_FILE, files[1].getAbsolutePath());
+    files[1] = createOutputPropsFile(getId(), this._cwd);
+    this.jobProps.put(ENV_PREFIX + JOB_OUTPUT_PROP_FILE, files[1].getAbsolutePath());
     return files;
   }
 
   public String getCwd() {
-    return _cwd;
+    return this._cwd;
   }
 
   public Map<String, String> getEnvironmentVariables() {
-    Props props = getJobProps();
-    Map<String, String> envMap = props.getMapByPrefix(ENV_PREFIX);
+    final Props props = getJobProps();
+    final Map<String, String> envMap = props.getMapByPrefix(ENV_PREFIX);
     envMap.putAll(props.getMapByPrefix(ENV_PREFIX_UCASE));
     return envMap;
   }
 
   public String getWorkingDirectory() {
-    String workingDir = getJobProps().getString(WORKING_DIR, _jobPath);
+    final String workingDir = getJobProps().getString(WORKING_DIR, this._jobPath);
     if (workingDir == null) {
       return "";
     }
@@ -160,25 +174,24 @@ public abstract class AbstractProcessJob extends AbstractJob {
       reader =
           new BufferedInputStream(new FileInputStream(outputPropertiesFile));
 
-      Props outputProps = new Props();
+      final Props outputProps = new Props();
       final String content = Streams.asString(reader).trim();
 
       if (!content.isEmpty()) {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> propMap =
+        final Map<String, Object> propMap =
             (Map<String, Object>) JSONUtils.parseJSONFromString(content);
 
-        for (Map.Entry<String, Object> entry : propMap.entrySet()) {
+        for (final Map.Entry<String, Object> entry : propMap.entrySet()) {
           outputProps.put(entry.getKey(), entry.getValue().toString());
         }
       }
       return outputProps;
-    } catch (FileNotFoundException e) {
-      log.info(String.format("File[%s] wasn't found, returning empty props.",
+    } catch (final FileNotFoundException e) {
+      this.log.info(String.format("File[%s] wasn't found, returning empty props.",
           outputPropertiesFile));
       return new Props();
-    } catch (Exception e) {
-      log.error(
+    } catch (final Exception e) {
+      this.log.error(
           "Exception thrown when trying to load output file props.  Returning empty Props instead of failing.  Is this really the best thing to do?",
           e);
       return new Props();
@@ -188,38 +201,21 @@ public abstract class AbstractProcessJob extends AbstractJob {
   }
 
   public File createFlattenedPropsFile(final String workingDir) {
-    File directory = new File(workingDir);
+    final File directory = new File(workingDir);
     File tempFile = null;
     try {
       // The temp file prefix must be at least 3 characters.
       tempFile = File.createTempFile(getId() + "_props_", "_tmp", directory);
-      jobProps.storeFlattened(tempFile);
-    } catch (IOException e) {
+      this.jobProps.storeFlattened(tempFile);
+    } catch (final IOException e) {
       throw new RuntimeException("Failed to create temp property file ", e);
     }
 
     return tempFile;
   }
 
-  public static File createOutputPropsFile(final String id,
-      final String workingDir) {
-    System.err.println("cwd=" + workingDir);
-
-    File directory = new File(workingDir);
-    File tempFile = null;
-    try {
-      tempFile = File.createTempFile(id + "_output_", "_tmp", directory);
-    } catch (IOException e) {
-      System.err.println("Failed to create temp output property file :\n");
-      e.printStackTrace(System.err);
-      throw new RuntimeException("Failed to create temp output property file ",
-          e);
-    }
-    return tempFile;
-  }
-
   public void generateProperties(final File outputFile) {
-    generatedProperties = loadOutputFileProps(outputFile);
+    this.generatedProperties = loadOutputFileProps(outputFile);
   }
 
 }
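
A note on how the plumbing above is consumed by the spawned process: initPropsFiles() writes a flattened copy of the resolved job props plus an empty output file created by createOutputPropsFile(), and exports both paths through environment variables; when the child exits, generateProperties() parses the output file as a flat JSON object of string key/value pairs. A minimal sketch of the child-process side, assuming the conventional JOB_OUTPUT_PROP_FILE variable name (the concrete values behind ENV_PREFIX and the *_ENV keys are defined elsewhere in this class and are not shown in this hunk):

// Hypothetical child-process snippet: emit output properties for Azkaban to pick up.
// Assumes the parent exported the path of the temp file created by createOutputPropsFile()
// under JOB_OUTPUT_PROP_FILE; loadOutputFileProps() then reads it as a flat JSON object.
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;

public class EmitOutputProps {
  public static void main(final String[] args) throws IOException {
    final String outputFile = System.getenv("JOB_OUTPUT_PROP_FILE"); // assumed variable name
    if (outputFile == null) {
      System.err.println("Not running under Azkaban; nothing to do.");
      return;
    }
    try (Writer w = new FileWriter(outputFile)) {
      // A flat JSON object; each entry becomes a key/value in the generated Props.
      w.write("{\"records.processed\": \"12345\", \"run.date\": \"2017-06-01\"}");
    }
  }
}
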
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/JavaProcessJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
index 707383b..4f1a6c8 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/JavaProcessJob.java
@@ -16,19 +16,18 @@
 
 package azkaban.jobExecutor;
 
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
 import azkaban.project.DirectoryFlowLoader;
 import azkaban.server.AzkabanServer;
 import azkaban.utils.Pair;
 import azkaban.utils.Props;
 import azkaban.utils.Utils;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.log4j.Logger;
 
 public class JavaProcessJob extends ProcessJob {
+
   public static final String CLASSPATH = "classpath";
   public static final String GLOBAL_CLASSPATH = "global.classpaths";
   public static final String JAVA_CLASS = "java.class";
@@ -43,14 +42,14 @@ public class JavaProcessJob extends ProcessJob {
 
   public static String JAVA_COMMAND = "java";
 
-  public JavaProcessJob(String jobid, Props sysProps, Props jobProps,
-      Logger logger) {
+  public JavaProcessJob(final String jobid, final Props sysProps, final Props jobProps,
+      final Logger logger) {
     super(jobid, sysProps, jobProps, logger);
   }
 
   @Override
   protected List<String> getCommandList() {
-    ArrayList<String> list = new ArrayList<String>();
+    final ArrayList<String> list = new ArrayList<>();
     list.add(createCommandLine());
     return list;
   }
@@ -72,7 +71,7 @@ public class JavaProcessJob extends ProcessJob {
   }
 
   protected String getClassPathParam() {
-    List<String> classPath = getClassPaths();
+    final List<String> classPath = getClassPaths();
     if (classPath == null || classPath.size() == 0) {
       return "";
     }
@@ -82,27 +81,27 @@ public class JavaProcessJob extends ProcessJob {
 
   protected List<String> getClassPaths() {
 
-    List<String> classPaths = getJobProps().getStringList(CLASSPATH, null, ",");
+    final List<String> classPaths = getJobProps().getStringList(CLASSPATH, null, ",");
 
-    ArrayList<String> classpathList = new ArrayList<String>();
+    final ArrayList<String> classpathList = new ArrayList<>();
     // Adding global properties used system wide.
     if (getJobProps().containsKey(GLOBAL_CLASSPATH)) {
-      List<String> globalClasspath =
+      final List<String> globalClasspath =
           getJobProps().getStringList(GLOBAL_CLASSPATH);
-      for (String global : globalClasspath) {
+      for (final String global : globalClasspath) {
         getLog().info("Adding to global classpath:" + global);
         classpathList.add(global);
       }
     }
 
     if (classPaths == null) {
-      File path = new File(getPath());
+      final File path = new File(getPath());
       // File parent = path.getParentFile();
       getLog().info(
           "No classpath specified. Trying to load classes from " + path);
 
       if (path != null) {
-        for (File file : path.listFiles()) {
+        for (final File file : path.listFiles()) {
           if (file.getName().endsWith(".jar")) {
             // log.info("Adding to classpath:" + file.getName());
             classpathList.add(file.getName());
@@ -130,7 +129,7 @@ public class JavaProcessJob extends ProcessJob {
   }
 
   protected String getJVMArguments() {
-    String globalJVMArgs = getJobProps().getString(GLOBAL_JVM_PARAMS, null);
+    final String globalJVMArgs = getJobProps().getString(GLOBAL_JVM_PARAMS, null);
 
     if (globalJVMArgs == null) {
       return getJobProps().getString(JVM_PARAMS, "");
@@ -139,10 +138,10 @@ public class JavaProcessJob extends ProcessJob {
     return globalJVMArgs + " " + getJobProps().getString(JVM_PARAMS, "");
   }
 
-  protected String createArguments(List<String> arguments, String separator) {
+  protected String createArguments(final List<String> arguments, final String separator) {
     if (arguments != null && arguments.size() > 0) {
       String param = "";
-      for (String arg : arguments) {
+      for (final String arg : arguments) {
         param += arg + separator;
       }
 
@@ -154,29 +153,33 @@ public class JavaProcessJob extends ProcessJob {
 
   @Override
   protected Pair<Long, Long> getProcMemoryRequirement() throws Exception {
-    String strXms = getInitialMemorySize();
-    String strXmx = getMaxMemorySize();
-    long xms = Utils.parseMemString(strXms);
-    long xmx = Utils.parseMemString(strXmx);
+    final String strXms = getInitialMemorySize();
+    final String strXmx = getMaxMemorySize();
+    final long xms = Utils.parseMemString(strXms);
+    final long xmx = Utils.parseMemString(strXmx);
 
-    Props azkabanProperties = AzkabanServer.getAzkabanProperties();
+    final Props azkabanProperties = AzkabanServer.getAzkabanProperties();
     if (azkabanProperties != null) {
-      String maxXms = azkabanProperties.getString(DirectoryFlowLoader.JOB_MAX_XMS, DirectoryFlowLoader.MAX_XMS_DEFAULT);
-      String maxXmx = azkabanProperties.getString(DirectoryFlowLoader.JOB_MAX_XMX, DirectoryFlowLoader.MAX_XMX_DEFAULT);
-      long sizeMaxXms = Utils.parseMemString(maxXms);
-      long sizeMaxXmx = Utils.parseMemString(maxXmx);
+      final String maxXms = azkabanProperties
+          .getString(DirectoryFlowLoader.JOB_MAX_XMS, DirectoryFlowLoader.MAX_XMS_DEFAULT);
+      final String maxXmx = azkabanProperties
+          .getString(DirectoryFlowLoader.JOB_MAX_XMX, DirectoryFlowLoader.MAX_XMX_DEFAULT);
+      final long sizeMaxXms = Utils.parseMemString(maxXms);
+      final long sizeMaxXmx = Utils.parseMemString(maxXmx);
 
       if (xms > sizeMaxXms) {
-        throw new Exception(String.format("%s: Xms value has exceeded the allowed limit (max Xms = %s)",
+        throw new Exception(
+            String.format("%s: Xms value has exceeded the allowed limit (max Xms = %s)",
                 getId(), maxXms));
       }
 
       if (xmx > sizeMaxXmx) {
-        throw new Exception(String.format("%s: Xmx value has exceeded the allowed limit (max Xmx = %s)",
+        throw new Exception(
+            String.format("%s: Xmx value has exceeded the allowed limit (max Xmx = %s)",
                 getId(), maxXmx));
       }
     }
 
-    return new Pair<Long, Long>(xms, xmx);
+    return new Pair<>(xms, xmx);
   }
 }
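
getProcMemoryRequirement() above parses the job's Xms/Xmx strings with Utils.parseMemString and rejects values that exceed the limits configured via DirectoryFlowLoader.JOB_MAX_XMS / JOB_MAX_XMX. A standalone sketch of that check; parseMemKb is a hypothetical stand-in for Utils.parseMemString, which, judging by the "kb" wording in ProcessJob's log messages, appears to return kilobytes:

public class MemLimitCheck {

  // Hypothetical stand-in for Utils.parseMemString: "512K"/"256M"/"2G" -> kilobytes.
  static long parseMemKb(final String s) {
    final char unit = Character.toUpperCase(s.charAt(s.length() - 1));
    final long n = Character.isDigit(unit)
        ? Long.parseLong(s)                               // plain number, treat as KB
        : Long.parseLong(s.substring(0, s.length() - 1));
    switch (unit) {
      case 'M': return n * 1024L;
      case 'G': return n * 1024L * 1024L;
      default:  return n;                                 // 'K' or plain KB
    }
  }

  public static void main(final String[] args) {
    final long xmx = parseMemKb("4G");
    final long maxXmx = parseMemKb("2G");
    if (xmx > maxXmx) {
      // Mirrors the exception message thrown by getProcMemoryRequirement() above.
      System.out.println("Xmx value has exceeded the allowed limit (max Xmx = 2G)");
    }
  }
}
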
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/Job.java b/azkaban-common/src/main/java/azkaban/jobExecutor/Job.java
index cf82636..02c72d9 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/Job.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/Job.java
@@ -33,8 +33,6 @@ public interface Job {
 
   /**
    * Returns a unique (should be checked in xml) string name/id for the Job.
-   *
-   * @return
    */
   public String getId();
 
@@ -61,15 +59,11 @@ public interface Job {
 
   /**
    * Get the generated properties from this job.
-   *
-   * @return
    */
   public Props getJobGeneratedProperties();
 
   /**
    * Determine if the job was cancelled.
-   *
-   * @return
    */
   public boolean isCanceled();
 }
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/LongArgJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/LongArgJob.java
index 8dd248f..53488b0 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/LongArgJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/LongArgJob.java
@@ -16,17 +16,15 @@
 
 package azkaban.jobExecutor;
 
+import azkaban.jobExecutor.utils.process.AzkabanProcess;
+import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
+import azkaban.utils.Props;
 import java.io.File;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-import azkaban.jobExecutor.utils.process.AzkabanProcess;
-import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
-
 /**
  * A job that passes all the job properties as command line arguments in "long"
  * format, e.g. --key1 value1 --key2 value2 ...
@@ -37,13 +35,13 @@ public abstract class LongArgJob extends AbstractProcessJob {
   private final AzkabanProcessBuilder builder;
   private volatile AzkabanProcess process;
 
-  public LongArgJob(String jobid, String[] command, Props sysProps,
-      Props jobProps, Logger log) {
-    this(jobid, command, sysProps, jobProps, log, new HashSet<String>(0));
+  public LongArgJob(final String jobid, final String[] command, final Props sysProps,
+      final Props jobProps, final Logger log) {
+    this(jobid, command, sysProps, jobProps, log, new HashSet<>(0));
   }
 
-  public LongArgJob(String jobid, String[] command, Props sysProps,
-      Props jobProp, Logger log, Set<String> suppressedKeys) {
+  public LongArgJob(final String jobid, final String[] command, final Props sysProps,
+      final Props jobProp, final Logger log, final Set<String> suppressedKeys) {
     super(jobid, sysProps, jobProp, log);
 
     this.builder =
@@ -57,29 +55,29 @@ public abstract class LongArgJob extends AbstractProcessJob {
   public void run() throws Exception {
     try {
       resolveProps();
-    } catch (Exception e) {
+    } catch (final Exception e) {
       error("Bad property definition! " + e.getMessage());
     }
 
-    long startMs = System.currentTimeMillis();
-    info("Command: " + builder.getCommandString());
-    if (builder.getEnv().size() > 0) {
-      info("Environment variables: " + builder.getEnv());
+    final long startMs = System.currentTimeMillis();
+    info("Command: " + this.builder.getCommandString());
+    if (this.builder.getEnv().size() > 0) {
+      info("Environment variables: " + this.builder.getEnv());
     }
-    info("Working directory: " + builder.getWorkingDir());
+    info("Working directory: " + this.builder.getWorkingDir());
 
-    File[] propFiles = initPropsFiles();
+    final File[] propFiles = initPropsFiles();
 
     // print out the Job properties to the job log.
     this.logJobProperties();
 
     boolean success = false;
-    this.process = builder.build();
+    this.process = this.builder.build();
     try {
       this.process.run();
       success = true;
-    } catch (Exception e) {
-      for (File file : propFiles) {
+    } catch (final Exception e) {
+      for (final File file : propFiles) {
         if (file != null && file.exists()) {
           file.delete();
         }
@@ -95,7 +93,7 @@ public abstract class LongArgJob extends AbstractProcessJob {
     // Get the output properties from this job.
     generateProperties(propFiles[1]);
 
-    for (File file : propFiles) {
+    for (final File file : propFiles) {
       if (file != null && file.exists()) {
         file.delete();
       }
@@ -112,26 +110,26 @@ public abstract class LongArgJob extends AbstractProcessJob {
 
   @Override
   public void cancel() throws InterruptedException {
-    if (process == null) {
+    if (this.process == null) {
       throw new IllegalStateException("Not started.");
     }
 
-    boolean killed = process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
+    final boolean killed = this.process.softKill(KILL_TIME_MS, TimeUnit.MILLISECONDS);
     if (!killed) {
       warn("Kill with signal TERM failed. Killing with KILL signal.");
-      process.hardKill();
+      this.process.hardKill();
     }
   }
 
   @Override
   public double getProgress() {
-    return process != null && process.isComplete() ? 1.0 : 0.0;
+    return this.process != null && this.process.isComplete() ? 1.0 : 0.0;
   }
 
-  private void appendProps(Set<String> suppressed) {
-    AzkabanProcessBuilder builder = this.getBuilder();
-    Props props = getJobProps();
-    for (String key : props.getKeySet()) {
+  private void appendProps(final Set<String> suppressed) {
+    final AzkabanProcessBuilder builder = this.getBuilder();
+    final Props props = getJobProps();
+    for (final String key : props.getKeySet()) {
       if (!suppressed.contains(key)) {
         builder.addArg("--" + key, props.get(key));
       }
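
For reference, the "long" argument format that appendProps() builds above simply turns every non-suppressed job property into a --key value pair appended to the base command. A self-contained sketch of that transformation (toLongArgs is illustrative only, not an Azkaban API):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class LongArgFormat {

  // Every property not in the suppressed set becomes "--key value" on the command line.
  static List<String> toLongArgs(final Map<String, String> props, final Set<String> suppressed) {
    final List<String> args = new ArrayList<>();
    for (final Map.Entry<String, String> e : props.entrySet()) {
      if (!suppressed.contains(e.getKey())) {
        args.add("--" + e.getKey());
        args.add(e.getValue());
      }
    }
    return args;
  }

  public static void main(final String[] args) {
    final Map<String, String> props = new LinkedHashMap<>();
    props.put("type", "python");
    props.put("input.path", "/data/in");
    props.put("output.path", "/data/out");
    // Keys like the job type are typically suppressed so they are not forwarded.
    System.out.println(toLongArgs(props, new HashSet<>(Arrays.asList("type"))));
    // -> [--input.path, /data/in, --output.path, /data/out]
  }
}
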
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/NoopJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/NoopJob.java
index 0f6c376..dd19c8b 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/NoopJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/NoopJob.java
@@ -16,17 +16,17 @@
 
 package azkaban.jobExecutor;
 
-import org.apache.log4j.Logger;
-
 import azkaban.utils.Props;
+import org.apache.log4j.Logger;
 
 /**
  * A no-op job.
  */
 public class NoopJob implements Job {
-  private String jobId;
 
-  public NoopJob(String jobid, Props props, Props jobProps, Logger log) {
+  private final String jobId;
+
+  public NoopJob(final String jobid, final Props props, final Props jobProps, final Logger log) {
     this.jobId = jobid;
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/ProcessJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/ProcessJob.java
index 3a132fc..a055a07 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/ProcessJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/ProcessJob.java
@@ -16,26 +16,24 @@
 
 package azkaban.jobExecutor;
 
+import static azkaban.ServiceProvider.SERVICE_PROVIDER;
+
 import azkaban.Constants;
+import azkaban.flow.CommonJobProperties;
+import azkaban.jobExecutor.utils.process.AzkabanProcess;
+import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
 import azkaban.metrics.CommonMetrics;
+import azkaban.utils.Pair;
+import azkaban.utils.Props;
+import azkaban.utils.SystemMemoryInfo;
 import java.io.File;
 import java.time.Duration;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.log4j.Logger;
 
-import azkaban.flow.CommonJobProperties;
-import azkaban.jobExecutor.utils.process.AzkabanProcess;
-import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
-import azkaban.utils.Pair;
-import azkaban.utils.Props;
-import azkaban.utils.SystemMemoryInfo;
-
-import static azkaban.ServiceProvider.*;
-
 
 /**
  * A job that runs a simple unix command
@@ -43,21 +41,14 @@ import static azkaban.ServiceProvider.*;
 public class ProcessJob extends AbstractProcessJob {
 
   public static final String COMMAND = "command";
-
-  private static final Duration KILL_TIME = Duration.ofSeconds(30);
-
-  private volatile AzkabanProcess process;
-
-  private static final String MEMCHECK_ENABLED = "memCheck.enabled";
-
   public static final String AZKABAN_MEMORY_CHECK = "azkaban.memory.check";
-
   public static final String NATIVE_LIB_FOLDER = "azkaban.native.lib";
   public static final String EXECUTE_AS_USER = "execute.as.user";
-
   public static final String USER_TO_PROXY = "user.to.proxy";
   public static final String KRB5CCNAME = "KRB5CCNAME";
-
+  private static final Duration KILL_TIME = Duration.ofSeconds(30);
+  private static final String MEMCHECK_ENABLED = "memCheck.enabled";
+  private volatile AzkabanProcess process;
   private volatile boolean killed = false;
 
   public ProcessJob(final String jobId, final Props sysProps,
@@ -65,50 +56,111 @@ public class ProcessJob extends AbstractProcessJob {
     super(jobId, sysProps, jobProps, log);
   }
 
+  /**
+   * Splits the command into a unix like command line structure. Quotes and
+   * single quotes are treated as nested strings.
+   */
+  public static String[] partitionCommandLine(final String command) {
+    final ArrayList<String> commands = new ArrayList<>();
+
+    int index = 0;
+
+    StringBuffer buffer = new StringBuffer(command.length());
+
+    boolean isApos = false;
+    boolean isQuote = false;
+    while (index < command.length()) {
+      final char c = command.charAt(index);
+
+      switch (c) {
+        case ' ':
+          if (!isQuote && !isApos) {
+            final String arg = buffer.toString();
+            buffer = new StringBuffer(command.length() - index);
+            if (arg.length() > 0) {
+              commands.add(arg);
+            }
+          } else {
+            buffer.append(c);
+          }
+          break;
+        case '\'':
+          if (!isQuote) {
+            isApos = !isApos;
+          } else {
+            buffer.append(c);
+          }
+          break;
+        case '"':
+          if (!isApos) {
+            isQuote = !isQuote;
+          } else {
+            buffer.append(c);
+          }
+          break;
+        default:
+          buffer.append(c);
+      }
+
+      index++;
+    }
+
+    if (buffer.length() > 0) {
+      final String arg = buffer.toString();
+      commands.add(arg);
+    }
+
+    return commands.toArray(new String[commands.size()]);
+  }
+
   @Override
   public void run() throws Exception {
     try {
       resolveProps();
-    } catch (Exception e) {
+    } catch (final Exception e) {
       handleError("Bad property definition! " + e.getMessage(), e);
     }
 
-    if (sysProps.getBoolean(MEMCHECK_ENABLED, true)
-        && jobProps.getBoolean(AZKABAN_MEMORY_CHECK, true)) {
-      Pair<Long, Long> memPair = getProcMemoryRequirement();
-      long xms = memPair.getFirst();
-      long xmx = memPair.getSecond();
+    if (this.sysProps.getBoolean(MEMCHECK_ENABLED, true)
+        && this.jobProps.getBoolean(AZKABAN_MEMORY_CHECK, true)) {
+      final Pair<Long, Long> memPair = getProcMemoryRequirement();
+      final long xms = memPair.getFirst();
+      final long xmx = memPair.getSecond();
       // retry backoff in ms
-      String oomMsg = String.format("Cannot request memory (Xms %d kb, Xmx %d kb) from system for job %s",
-          xms, xmx, getId());
+      final String oomMsg = String
+          .format("Cannot request memory (Xms %d kb, Xmx %d kb) from system for job %s",
+              xms, xmx, getId());
       int attempt;
       boolean isMemGranted = true;
 
       //todo HappyRay: move to proper Guice after this class is refactored.
-      SystemMemoryInfo memInfo = SERVICE_PROVIDER.getInstance(SystemMemoryInfo.class);
-      for(attempt = 1; attempt <= Constants.MEMORY_CHECK_RETRY_LIMIT; attempt++) {
+      final SystemMemoryInfo memInfo = SERVICE_PROVIDER.getInstance(SystemMemoryInfo.class);
+      for (attempt = 1; attempt <= Constants.MEMORY_CHECK_RETRY_LIMIT; attempt++) {
         isMemGranted = memInfo.canSystemGrantMemory(xmx);
         if (isMemGranted) {
           info(String.format("Memory granted for job %s", getId()));
-          if(attempt > 1) {
+          if (attempt > 1) {
             CommonMetrics.INSTANCE.decrementOOMJobWaitCount();
           }
           break;
         }
         if (attempt < Constants.MEMORY_CHECK_RETRY_LIMIT) {
-          info(String.format(oomMsg + ", sleep for %s secs and retry, attempt %s of %s", TimeUnit.MILLISECONDS.toSeconds(
-              Constants.MEMORY_CHECK_INTERVAL_MS), attempt, Constants.MEMORY_CHECK_RETRY_LIMIT));
+          info(String.format(oomMsg + ", sleep for %s secs and retry, attempt %s of %s",
+              TimeUnit.MILLISECONDS.toSeconds(
+                  Constants.MEMORY_CHECK_INTERVAL_MS), attempt,
+              Constants.MEMORY_CHECK_RETRY_LIMIT));
           if (attempt == 1) {
             CommonMetrics.INSTANCE.incrementOOMJobWaitCount();
           }
           synchronized (this) {
             try {
               this.wait(Constants.MEMORY_CHECK_INTERVAL_MS);
-            } catch (InterruptedException e) {
-              info(String.format("Job %s interrupted while waiting for memory check retry", getId()));
+            } catch (final InterruptedException e) {
+              info(String
+                  .format("Job %s interrupted while waiting for memory check retry", getId()));
             }
           }
-          if(killed) {
+          if (this.killed) {
             CommonMetrics.INSTANCE.decrementOOMJobWaitCount();
             info(String.format("Job %s was killed while waiting for memory check retry", getId()));
             return;
@@ -122,47 +174,45 @@ public class ProcessJob extends AbstractProcessJob {
       }
     }
 
-
     List<String> commands = null;
     try {
       commands = getCommandList();
-    } catch (Exception e) {
+    } catch (final Exception e) {
       handleError("Job set up failed " + e.getCause(), e);
     }
 
-    long startMs = System.currentTimeMillis();
+    final long startMs = System.currentTimeMillis();
 
     if (commands == null) {
       handleError("There are no commands to execute", null);
     }
 
     info(commands.size() + " commands to execute.");
-    File[] propFiles = initPropsFiles();
+    final File[] propFiles = initPropsFiles();
 
     // change krb5ccname env var so that each job execution gets its own cache
-    Map<String, String> envVars = getEnvironmentVariables();
-    envVars.put(KRB5CCNAME, getKrb5ccname(jobProps));
+    final Map<String, String> envVars = getEnvironmentVariables();
+    envVars.put(KRB5CCNAME, getKrb5ccname(this.jobProps));
 
     // determine whether to run as Azkaban or run as effectiveUser,
     // by default, run as effectiveUser
     String executeAsUserBinaryPath = null;
     String effectiveUser = null;
-    boolean isExecuteAsUser = sysProps.getBoolean(EXECUTE_AS_USER, true);
+    final boolean isExecuteAsUser = this.sysProps.getBoolean(EXECUTE_AS_USER, true);
 
     // nativeLibFolder specifies the path for execute-as-user file,
     // which will change user from Azkaban to effectiveUser
     if (isExecuteAsUser) {
-      String nativeLibFolder = sysProps.getString(NATIVE_LIB_FOLDER);
+      final String nativeLibFolder = this.sysProps.getString(NATIVE_LIB_FOLDER);
       executeAsUserBinaryPath =
           String.format("%s/%s", nativeLibFolder, "execute-as-user");
-      effectiveUser = getEffectiveUser(jobProps);
+      effectiveUser = getEffectiveUser(this.jobProps);
       if ("root".equals(effectiveUser)) {
         throw new RuntimeException(
             "Not permitted to proxy as root through Azkaban");
       }
     }
 
-
     for (String command : commands) {
       AzkabanProcessBuilder builder = null;
       if (isExecuteAsUser) {
@@ -196,10 +246,12 @@ public class ProcessJob extends AbstractProcessJob {
       try {
         this.process.run();
         success = true;
-      } catch (Throwable e) {
-        for (File file : propFiles)
-          if (file != null && file.exists())
+      } catch (final Throwable e) {
+        for (final File file : propFiles) {
+          if (file != null && file.exists()) {
             file.delete();
+          }
+        }
         throw new RuntimeException(e);
       } finally {
         this.process = null;
@@ -217,24 +269,26 @@ public class ProcessJob extends AbstractProcessJob {
    * <pre>
    * This method extracts the kerberos ticket cache file name from the jobprops.
    * This method will ensure that each job execution will have its own kerberos ticket cache file
-   * Given that the code only sets an environmental variable, the number of files created corresponds
-   * to the number of processes that are doing kinit in their flow, which should not be an inordinately
+   * Given that the code only sets an environmental variable, the number of files created
+   * corresponds
+   * to the number of processes that are doing kinit in their flow, which should not be an
+   * inordinately
    * high number.
    * </pre>
    *
    * @return file name: the kerberos ticket cache file to use
    */
-  private String getKrb5ccname(Props jobProps) {
-    String effectiveUser = getEffectiveUser(jobProps);
-    String projectName =
+  private String getKrb5ccname(final Props jobProps) {
+    final String effectiveUser = getEffectiveUser(jobProps);
+    final String projectName =
         jobProps.getString(CommonJobProperties.PROJECT_NAME).replace(" ", "_");
-    String flowId =
+    final String flowId =
         jobProps.getString(CommonJobProperties.FLOW_ID).replace(" ", "_");
-    String jobId =
+    final String jobId =
         jobProps.getString(CommonJobProperties.JOB_ID).replace(" ", "_");
     // execId should be an int and should not have space in it, ever
-    String execId = jobProps.getString(CommonJobProperties.EXEC_ID);
-    String krb5ccname =
+    final String execId = jobProps.getString(CommonJobProperties.EXEC_ID);
+    final String krb5ccname =
         String.format("/tmp/krb5cc__%s__%s__%s__%s__%s", projectName, flowId,
             jobId, execId, effectiveUser);
 
@@ -248,10 +302,9 @@ public class ProcessJob extends AbstractProcessJob {
    * 2. SUBMIT_USER
    * </pre>
    *
-   * @param jobProps
    * @return the user that Azkaban is going to execute as
    */
-  private String getEffectiveUser(Props jobProps) {
+  private String getEffectiveUser(final Props jobProps) {
     String effectiveUser = null;
     if (jobProps.containsKey(USER_TO_PROXY)) {
       effectiveUser = jobProps.getString(USER_TO_PROXY);
@@ -273,10 +326,10 @@ public class ProcessJob extends AbstractProcessJob {
    * @return pair of min/max memory size
    */
   protected Pair<Long, Long> getProcMemoryRequirement() throws Exception {
-    return new Pair<Long, Long>(0L, 0L);
+    return new Pair<>(0L, 0L);
   }
 
-  protected void handleError(String errorMsg, Exception e) throws Exception {
+  protected void handleError(final String errorMsg, final Exception e) throws Exception {
     error(errorMsg);
     if (e != null) {
       throw new Exception(errorMsg, e);
@@ -286,10 +339,10 @@ public class ProcessJob extends AbstractProcessJob {
   }
 
   protected List<String> getCommandList() {
-    List<String> commands = new ArrayList<String>();
-    commands.add(jobProps.getString(COMMAND));
-    for (int i = 1; jobProps.containsKey(COMMAND + "." + i); i++) {
-      commands.add(jobProps.getString(COMMAND + "." + i));
+    final List<String> commands = new ArrayList<>();
+    commands.add(this.jobProps.getString(COMMAND));
+    for (int i = 1; this.jobProps.containsKey(COMMAND + "." + i); i++) {
+      commands.add(this.jobProps.getString(COMMAND + "." + i));
     }
 
     return commands;
@@ -299,89 +352,31 @@ public class ProcessJob extends AbstractProcessJob {
   public void cancel() throws InterruptedException {
     // in case the job is waiting
     synchronized (this) {
-      killed = true;
+      this.killed = true;
       this.notify();
     }
 
-    if (process == null)
+    if (this.process == null) {
       throw new IllegalStateException("Not started.");
-    boolean processkilled = process.softKill(KILL_TIME.toMillis(), TimeUnit.MILLISECONDS);
+    }
+    final boolean processkilled = this.process
+        .softKill(KILL_TIME.toMillis(), TimeUnit.MILLISECONDS);
     if (!processkilled) {
       warn("Kill with signal TERM failed. Killing with KILL signal.");
-      process.hardKill();
+      this.process.hardKill();
     }
   }
 
   @Override
   public double getProgress() {
-    return process != null && process.isComplete() ? 1.0 : 0.0;
+    return this.process != null && this.process.isComplete() ? 1.0 : 0.0;
   }
 
   public int getProcessId() {
-    return process.getProcessId();
+    return this.process.getProcessId();
   }
 
   public String getPath() {
-    return _jobPath == null ? "" : _jobPath;
-  }
-
-  /**
-   * Splits the command into a unix like command line structure. Quotes and
-   * single quotes are treated as nested strings.
-   *
-   * @param command
-   * @return
-   */
-  public static String[] partitionCommandLine(final String command) {
-    ArrayList<String> commands = new ArrayList<String>();
-
-    int index = 0;
-
-    StringBuffer buffer = new StringBuffer(command.length());
-
-    boolean isApos = false;
-    boolean isQuote = false;
-    while (index < command.length()) {
-      char c = command.charAt(index);
-
-      switch (c) {
-      case ' ':
-        if (!isQuote && !isApos) {
-          String arg = buffer.toString();
-          buffer = new StringBuffer(command.length() - index);
-          if (arg.length() > 0) {
-            commands.add(arg);
-          }
-        } else {
-          buffer.append(c);
-        }
-        break;
-      case '\'':
-        if (!isQuote) {
-          isApos = !isApos;
-        } else {
-          buffer.append(c);
-        }
-        break;
-      case '"':
-        if (!isApos) {
-          isQuote = !isQuote;
-        } else {
-          buffer.append(c);
-        }
-        break;
-      default:
-        buffer.append(c);
-      }
-
-      index++;
-    }
-
-    if (buffer.length() > 0) {
-      String arg = buffer.toString();
-      commands.add(arg);
-    }
-
-    return commands.toArray(new String[commands.size()]);
+    return this._jobPath == null ? "" : this._jobPath;
   }
 }
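
Two usage details worth keeping in mind with the reshuffled ProcessJob above: getCommandList() reads the command property and then command.1, command.2, ... in order, and partitionCommandLine() (now moved to the top of the class) splits each command string with shell-like quoting, so quoted phrases stay together as a single argument. A quick demonstration of the latter, assuming azkaban-common is on the classpath:

// Double and single quotes group words into one argument and are stripped from the result.
import azkaban.jobExecutor.ProcessJob;
import java.util.Arrays;

public class PartitionDemo {
  public static void main(final String[] args) {
    final String[] parts = ProcessJob.partitionCommandLine("echo \"hello world\" 'a b'");
    System.out.println(Arrays.toString(parts)); // [echo, hello world, a b]
  }
}
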
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/PythonJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/PythonJob.java
index 135790e..ce7aa2d 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/PythonJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/PythonJob.java
@@ -16,22 +16,21 @@
 
 package azkaban.jobExecutor;
 
-import org.apache.log4j.Logger;
-
-import com.google.common.collect.ImmutableSet;
-
 import azkaban.utils.Props;
+import com.google.common.collect.ImmutableSet;
+import org.apache.log4j.Logger;
 
 public class PythonJob extends LongArgJob {
 
   private static final String PYTHON_BINARY_KEY = "python";
   private static final String SCRIPT_KEY = "script";
 
-  public PythonJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+  public PythonJob(final String jobid, final Props sysProps, final Props jobProps,
+      final Logger log) {
     super(jobid,
-        new String[] {
-          jobProps.getString(PYTHON_BINARY_KEY, "python"),
-          jobProps.getString(SCRIPT_KEY)
+        new String[]{
+            jobProps.getString(PYTHON_BINARY_KEY, "python"),
+            jobProps.getString(SCRIPT_KEY)
         },
         sysProps, jobProps, log,
         ImmutableSet.of(PYTHON_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
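
PythonJob above resolves to the command <python binary> <script> followed by the remaining job properties in LongArgJob's --key value form, with python, script and the job type key suppressed. A minimal construction sketch using only constructors and methods visible in this diff; in a real deployment Azkaban builds the job from a .job file rather than from code:

import azkaban.jobExecutor.PythonJob;
import azkaban.utils.Props;
import org.apache.log4j.Logger;

public class PythonJobDemo {
  public static void main(final String[] args) {
    final Props sysProps = new Props();
    final Props jobProps = new Props();
    jobProps.put("python", "/usr/bin/python");   // optional, defaults to "python"
    jobProps.put("script", "scripts/etl.py");
    jobProps.put("input.date", "2017-06-01");    // forwarded as --input.date 2017-06-01
    final PythonJob job = new PythonJob("demo-python", sysProps, jobProps,
        Logger.getLogger(PythonJobDemo.class));
    System.out.println("Configured job: " + job.getId());
    // job.run() would spawn the python process in the job's working directory.
  }
}
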
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/RubyJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/RubyJob.java
index a5f88f7..4e118f7 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/RubyJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/RubyJob.java
@@ -16,22 +16,20 @@
 
 package azkaban.jobExecutor;
 
-import org.apache.log4j.Logger;
-
 import azkaban.utils.Props;
-
 import com.google.common.collect.ImmutableSet;
+import org.apache.log4j.Logger;
 
 public class RubyJob extends LongArgJob {
 
   private static final String RUBY_BINARY_KEY = "ruby";
   private static final String SCRIPT_KEY = "script";
 
-  public RubyJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+  public RubyJob(final String jobid, final Props sysProps, final Props jobProps, final Logger log) {
     super(jobid,
-        new String[] {
-          jobProps.getString(RUBY_BINARY_KEY, "ruby"),
-          jobProps.getString(SCRIPT_KEY)
+        new String[]{
+            jobProps.getString(RUBY_BINARY_KEY, "ruby"),
+            jobProps.getString(SCRIPT_KEY)
         },
         sysProps, jobProps, log,
         ImmutableSet.of(RUBY_BINARY_KEY, SCRIPT_KEY, JOB_TYPE));
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/ScriptJob.java b/azkaban-common/src/main/java/azkaban/jobExecutor/ScriptJob.java
index d591d35..845d13b 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/ScriptJob.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/ScriptJob.java
@@ -16,28 +16,26 @@
 
 package azkaban.jobExecutor;
 
-import org.apache.log4j.Logger;
-
-import com.google.common.collect.ImmutableSet;
-
 import azkaban.utils.Props;
+import com.google.common.collect.ImmutableSet;
+import org.apache.log4j.Logger;
 
 /**
  * A script job issues a command of the form [EXECUTABLE] [SCRIPT] --key1 val1
  * ... --key2 val2 executable -- the interpretor command to execute script --
  * the script to pass in (requried)
- *
  */
 public class ScriptJob extends LongArgJob {
 
   private static final String DEFAULT_EXECUTABLE_KEY = "executable";
   private static final String SCRIPT_KEY = "script";
 
-  public ScriptJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+  public ScriptJob(final String jobid, final Props sysProps, final Props jobProps,
+      final Logger log) {
     super(jobid,
-        new String[] {
-          jobProps.getString(DEFAULT_EXECUTABLE_KEY),
-          jobProps.getString(SCRIPT_KEY)
+        new String[]{
+            jobProps.getString(DEFAULT_EXECUTABLE_KEY),
+            jobProps.getString(SCRIPT_KEY)
         },
         sysProps,
         jobProps,
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
index 45cf665..3bb1aa4 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/JobExecutionException.java
@@ -20,15 +20,15 @@ public class JobExecutionException extends RuntimeException {
 
   private final static long serialVersionUID = 1;
 
-  public JobExecutionException(String message) {
+  public JobExecutionException(final String message) {
     super(message);
   }
 
-  public JobExecutionException(Throwable cause) {
+  public JobExecutionException(final Throwable cause) {
     super(cause);
   }
 
-  public JobExecutionException(String message, Throwable cause) {
+  public JobExecutionException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
index 9ad88d3..9a88204 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcess.java
@@ -16,6 +16,8 @@
 
 package azkaban.jobExecutor.utils.process;
 
+import azkaban.utils.LogGobbler;
+import com.google.common.base.Joiner;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -24,25 +26,20 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-import azkaban.utils.LogGobbler;
-
-import com.google.common.base.Joiner;
-
 /**
  * An improved version of java.lang.Process.
- * 
+ *
  * Output is read by separate threads to avoid deadlock and logged to log4j
  * loggers.
  */
 public class AzkabanProcess {
-  
+
   public static String KILL_COMMAND = "kill";
-  
+
   private final String workingDir;
   private final List<String> cmd;
   private final Map<String, String> env;
@@ -68,9 +65,9 @@ public class AzkabanProcess {
     this.logger = logger;
   }
 
-  public AzkabanProcess(List<String> cmd, Map<String, String> env,
-      String workingDir, Logger logger, String executeAsUserBinary,
-      String effectiveUser) {
+  public AzkabanProcess(final List<String> cmd, final Map<String, String> env,
+      final String workingDir, final Logger logger, final String executeAsUserBinary,
+      final String effectiveUser) {
     this(cmd, env, workingDir, logger);
     this.isExecuteAsUser = true;
     this.executeAsUserBinary = executeAsUserBinary;
@@ -85,45 +82,45 @@ public class AzkabanProcess {
       throw new IllegalStateException("The process can only be used once.");
     }
 
-    ProcessBuilder builder = new ProcessBuilder(cmd);
-    builder.directory(new File(workingDir));
-    builder.environment().putAll(env);
+    final ProcessBuilder builder = new ProcessBuilder(this.cmd);
+    builder.directory(new File(this.workingDir));
+    builder.environment().putAll(this.env);
     builder.redirectErrorStream(true);
     this.process = builder.start();
     try {
-      this.processId = processId(process);
-      if (processId == 0) {
-        logger.debug("Spawned thread with unknown process id");
+      this.processId = processId(this.process);
+      if (this.processId == 0) {
+        this.logger.debug("Spawned thread with unknown process id");
       } else {
-        logger.debug("Spawned thread with process id " + processId);
+        this.logger.debug("Spawned thread with process id " + this.processId);
       }
 
       this.startupLatch.countDown();
 
-      LogGobbler outputGobbler =
-          new LogGobbler(new InputStreamReader(process.getInputStream()),
-              logger, Level.INFO, 30);
-      LogGobbler errorGobbler =
-          new LogGobbler(new InputStreamReader(process.getErrorStream()),
-              logger, Level.ERROR, 30);
+      final LogGobbler outputGobbler =
+          new LogGobbler(new InputStreamReader(this.process.getInputStream()),
+              this.logger, Level.INFO, 30);
+      final LogGobbler errorGobbler =
+          new LogGobbler(new InputStreamReader(this.process.getErrorStream()),
+              this.logger, Level.ERROR, 30);
 
       outputGobbler.start();
       errorGobbler.start();
       int exitCode = -1;
       try {
-        exitCode = process.waitFor();
-      } catch (InterruptedException e) {
-        logger.info("Process interrupted. Exit code is " + exitCode, e);
+        exitCode = this.process.waitFor();
+      } catch (final InterruptedException e) {
+        this.logger.info("Process interrupted. Exit code is " + exitCode, e);
       }
 
-      completeLatch.countDown();
+      this.completeLatch.countDown();
 
       // try to wait for everything to get logged out before exiting
       outputGobbler.awaitCompletion(5000);
       errorGobbler.awaitCompletion(5000);
 
       if (exitCode != 0) {
-        String output =
+        final String output =
             new StringBuilder().append("Stdout:\n")
                 .append(outputGobbler.getRecentLog()).append("\n\n")
                 .append("Stderr:\n").append(errorGobbler.getRecentLog())
@@ -132,15 +129,15 @@ public class AzkabanProcess {
       }
 
     } finally {
-      IOUtils.closeQuietly(process.getInputStream());
-      IOUtils.closeQuietly(process.getOutputStream());
-      IOUtils.closeQuietly(process.getErrorStream());
+      IOUtils.closeQuietly(this.process.getInputStream());
+      IOUtils.closeQuietly(this.process.getOutputStream());
+      IOUtils.closeQuietly(this.process.getErrorStream());
     }
   }
 
   /**
    * Await the completion of this process
-   * 
+   *
    * @throws InterruptedException if the thread is interrupted while waiting.
    */
   public void awaitCompletion() throws InterruptedException {
@@ -149,7 +146,7 @@ public class AzkabanProcess {
 
   /**
    * Await the start of this process
-   * 
+   *
    * @throws InterruptedException if the thread is interrupted while waiting.
    */
   public void awaitStartup() throws InterruptedException {
@@ -158,7 +155,7 @@ public class AzkabanProcess {
 
   /**
    * Get the process id for this process, if it has started.
-   * 
+   *
    * @return The process id or -1 if it cannot be fetched
    */
   public int getProcessId() {
@@ -168,7 +165,7 @@ public class AzkabanProcess {
 
   /**
    * Attempt to kill the process, waiting up to the given time for it to die
-   * 
+   *
    * @param time The amount of time to wait
    * @param unit The time unit
    * @return true iff this soft kill kills the process in the given wait time.
@@ -176,20 +173,20 @@ public class AzkabanProcess {
   public boolean softKill(final long time, final TimeUnit unit)
       throws InterruptedException {
     checkStarted();
-    if (processId != 0 && isStarted()) {
+    if (this.processId != 0 && isStarted()) {
       try {
-        if (isExecuteAsUser) {
-          String cmd =
-              String.format("%s %s %s %d", executeAsUserBinary,
-                  effectiveUser, KILL_COMMAND, processId);
+        if (this.isExecuteAsUser) {
+          final String cmd =
+              String.format("%s %s %s %d", this.executeAsUserBinary,
+                  this.effectiveUser, KILL_COMMAND, this.processId);
           Runtime.getRuntime().exec(cmd);
         } else {
-          String cmd = String.format("%s %d", KILL_COMMAND, processId);
+          final String cmd = String.format("%s %d", KILL_COMMAND, this.processId);
           Runtime.getRuntime().exec(cmd);
         }
-        return completeLatch.await(time, unit);
-      } catch (IOException e) {
-        logger.error("Kill attempt failed.", e);
+        return this.completeLatch.await(time, unit);
+      } catch (final IOException e) {
+        this.logger.error("Kill attempt failed.", e);
       }
       return false;
     }
@@ -202,39 +199,39 @@ public class AzkabanProcess {
   public void hardKill() {
     checkStarted();
     if (isRunning()) {
-      if (processId != 0) {
+      if (this.processId != 0) {
         try {
-          if (isExecuteAsUser) {
-            String cmd =
-                String.format("%s %s %s -9 %d", executeAsUserBinary,
-                    effectiveUser, KILL_COMMAND, processId);
+          if (this.isExecuteAsUser) {
+            final String cmd =
+                String.format("%s %s %s -9 %d", this.executeAsUserBinary,
+                    this.effectiveUser, KILL_COMMAND, this.processId);
             Runtime.getRuntime().exec(cmd);
           } else {
-            String cmd = String.format("%s -9 %d", KILL_COMMAND, processId);
+            final String cmd = String.format("%s -9 %d", KILL_COMMAND, this.processId);
             Runtime.getRuntime().exec(cmd);
           }
-        } catch (IOException e) {
-          logger.error("Kill attempt failed.", e);
+        } catch (final IOException e) {
+          this.logger.error("Kill attempt failed.", e);
         }
       }
-      process.destroy();
+      this.process.destroy();
     }
   }
 
   /**
    * Attempt to get the process id for this process
-   * 
+   *
    * @param process The process to get the id from
    * @return The id of the process
    */
   private int processId(final java.lang.Process process) {
     int processId = 0;
     try {
-      Field f = process.getClass().getDeclaredField("pid");
+      final Field f = process.getClass().getDeclaredField("pid");
       f.setAccessible(true);
 
       processId = f.getInt(process);
-    } catch (Throwable e) {
+    } catch (final Throwable e) {
       e.printStackTrace();
     }
 
@@ -245,14 +242,14 @@ public class AzkabanProcess {
    * @return true iff the process has been started
    */
   public boolean isStarted() {
-    return startupLatch.getCount() == 0L;
+    return this.startupLatch.getCount() == 0L;
   }
 
   /**
    * @return true iff the process has completed
    */
   public boolean isComplete() {
-    return completeLatch.getCount() == 0L;
+    return this.completeLatch.getCount() == 0L;
   }
 
   /**
@@ -270,15 +267,15 @@ public class AzkabanProcess {
 
   @Override
   public String toString() {
-    return "Process(cmd = " + Joiner.on(" ").join(cmd) + ", env = " + env
-        + ", cwd = " + workingDir + ")";
+    return "Process(cmd = " + Joiner.on(" ").join(this.cmd) + ", env = " + this.env
+        + ", cwd = " + this.workingDir + ")";
   }
 
   public boolean isExecuteAsUser() {
-    return isExecuteAsUser;
+    return this.isExecuteAsUser;
   }
 
   public String getEffectiveUser() {
-    return effectiveUser;
+    return this.effectiveUser;
   }
 }
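
The processId() helper above reads the private "pid" field of the JDK's UNIX process implementation reflectively and falls back to 0 when it cannot; softKill()/hardKill() then shell out to kill / kill -9 with that id (optionally through the execute-as-user binary). A standalone version of the reflection trick, for reference; on JDK 9+ the lookup is typically blocked (or superseded by Process.pid()) and this simply returns 0:

import java.lang.reflect.Field;

public class PidByReflection {

  // Same convention as AzkabanProcess.processId(): 0 means the pid could not be determined.
  static int pidOf(final Process process) {
    try {
      final Field f = process.getClass().getDeclaredField("pid");
      f.setAccessible(true);
      return f.getInt(process);
    } catch (final Throwable e) {
      return 0;
    }
  }

  public static void main(final String[] args) throws Exception {
    final Process p = new ProcessBuilder("sleep", "1").start();
    System.out.println("pid = " + pidOf(p));
    p.waitFor();
  }
}
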
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
index 9e2c2f7..26ec563 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
@@ -16,23 +16,21 @@
 
 package azkaban.jobExecutor.utils.process;
 
+import com.google.common.base.Joiner;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.log4j.Logger;
 
-import com.google.common.base.Joiner;
-
 /**
  * Helper code for building a process
  */
 public class AzkabanProcessBuilder {
 
-  private List<String> cmd = new ArrayList<String>();
-  private Map<String, String> env = new HashMap<String, String>();
+  private final List<String> cmd = new ArrayList<>();
+  private Map<String, String> env = new HashMap<>();
   private String workingDir = System.getProperty("user.dir");
   private Logger logger = Logger.getLogger(AzkabanProcess.class);
   private boolean isExecuteAsUser = false;
@@ -42,36 +40,32 @@ public class AzkabanProcessBuilder {
   private int stdErrSnippetSize = 30;
   private int stdOutSnippetSize = 30;
 
-  public AzkabanProcessBuilder(String... command) {
+  public AzkabanProcessBuilder(final String... command) {
     addArg(command);
   }
 
-  public AzkabanProcessBuilder addArg(String... command) {
-    for (String c : command)
-      cmd.add(c);
+  public AzkabanProcessBuilder addArg(final String... command) {
+    for (final String c : command) {
+      this.cmd.add(c);
+    }
     return this;
   }
 
-  public AzkabanProcessBuilder setWorkingDir(String dir) {
+  public AzkabanProcessBuilder setWorkingDir(final String dir) {
     this.workingDir = dir;
     return this;
   }
 
-  public AzkabanProcessBuilder setWorkingDir(File f) {
-    return setWorkingDir(f.getAbsolutePath());
-  }
-
   public String getWorkingDir() {
     return this.workingDir;
   }
 
-  public AzkabanProcessBuilder addEnv(String variable, String value) {
-    env.put(variable, value);
-    return this;
+  public AzkabanProcessBuilder setWorkingDir(final File f) {
+    return setWorkingDir(f.getAbsolutePath());
   }
 
-  public AzkabanProcessBuilder setEnv(Map<String, String> m) {
-    this.env = m;
+  public AzkabanProcessBuilder addEnv(final String variable, final String value) {
+    this.env.put(variable, value);
     return this;
   }
 
@@ -79,13 +73,8 @@ public class AzkabanProcessBuilder {
     return this.env;
   }
 
-  public AzkabanProcessBuilder setStdErrorSnippetSize(int size) {
-    this.stdErrSnippetSize = size;
-    return this;
-  }
-
-  public AzkabanProcessBuilder setStdOutSnippetSize(int size) {
-    this.stdOutSnippetSize = size;
+  public AzkabanProcessBuilder setEnv(final Map<String, String> m) {
+    this.env = m;
     return this;
   }
 
@@ -93,21 +82,31 @@ public class AzkabanProcessBuilder {
     return this.stdErrSnippetSize;
   }
 
+  public AzkabanProcessBuilder setStdErrorSnippetSize(final int size) {
+    this.stdErrSnippetSize = size;
+    return this;
+  }
+
   public int getStdOutSnippetSize() {
     return this.stdOutSnippetSize;
   }
 
-  public AzkabanProcessBuilder setLogger(Logger logger) {
+  public AzkabanProcessBuilder setStdOutSnippetSize(final int size) {
+    this.stdOutSnippetSize = size;
+    return this;
+  }
+
+  public AzkabanProcessBuilder setLogger(final Logger logger) {
     this.logger = logger;
     return this;
   }
 
   public AzkabanProcess build() {
-    if (isExecuteAsUser) {
-      return new AzkabanProcess(cmd, env, workingDir, logger,
-          executeAsUserBinaryPath, effectiveUser);
+    if (this.isExecuteAsUser) {
+      return new AzkabanProcess(this.cmd, this.env, this.workingDir, this.logger,
+          this.executeAsUserBinaryPath, this.effectiveUser);
     } else {
-      return new AzkabanProcess(cmd, env, workingDir, logger);
+      return new AzkabanProcess(this.cmd, this.env, this.workingDir, this.logger);
     }
   }
 
@@ -121,8 +120,8 @@ public class AzkabanProcessBuilder {
 
   @Override
   public String toString() {
-    return "ProcessBuilder(cmd = " + Joiner.on(" ").join(cmd) + ", env = "
-        + env + ", cwd = " + workingDir + ")";
+    return "ProcessBuilder(cmd = " + Joiner.on(" ").join(this.cmd) + ", env = "
+        + this.env + ", cwd = " + this.workingDir + ")";
   }
 
   public AzkabanProcessBuilder enableExecuteAsUser() {
@@ -130,12 +129,12 @@ public class AzkabanProcessBuilder {
     return this;
   }
 
-  public AzkabanProcessBuilder setExecuteAsUserBinaryPath(String executeAsUserBinaryPath) {
+  public AzkabanProcessBuilder setExecuteAsUserBinaryPath(final String executeAsUserBinaryPath) {
     this.executeAsUserBinaryPath = executeAsUserBinaryPath;
     return this;
   }
 
-  public AzkabanProcessBuilder setEffectiveUser(String effectiveUser) {
+  public AzkabanProcessBuilder setEffectiveUser(final String effectiveUser) {
     this.effectiveUser = effectiveUser;
     return this;
   }
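
Typical fluent use of the builder, as exercised by ProcessJob and LongArgJob above. A small sketch assuming azkaban-common and log4j are on the classpath; run() blocks until the child exits, gobbling stdout/stderr into the logger, and a non-zero exit code appears to surface as a ProcessFailureException carrying a log snippet:

import azkaban.jobExecutor.utils.process.AzkabanProcess;
import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Logger;

public class BuilderDemo {
  public static void main(final String[] args) throws Exception {
    BasicConfigurator.configure(); // simple console appender so the gobbled output is visible
    final AzkabanProcess process = new AzkabanProcessBuilder("echo", "hello")
        .setWorkingDir(System.getProperty("user.dir"))
        .addEnv("DEMO_ENV", "1")
        .setLogger(Logger.getLogger(BuilderDemo.class))
        .build();
    process.run(); // child output is logged by the LogGobbler threads
  }
}
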
diff --git a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
index 9d031e4..473dace 100644
--- a/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
+++ b/azkaban-common/src/main/java/azkaban/jobExecutor/utils/process/ProcessFailureException.java
@@ -23,13 +23,13 @@ public class ProcessFailureException extends RuntimeException {
   private final int exitCode;
   private final String logSnippet;
 
-  public ProcessFailureException(int exitCode, String logSnippet) {
+  public ProcessFailureException(final int exitCode, final String logSnippet) {
     this.exitCode = exitCode;
     this.logSnippet = logSnippet;
   }
 
   public int getExitCode() {
-    return exitCode;
+    return this.exitCode;
   }
 
   public String getLogSnippet() {
diff --git a/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManager.java b/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManager.java
index d396560..59f8612 100644
--- a/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManager.java
+++ b/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManager.java
@@ -16,16 +16,6 @@
 
 package azkaban.jobtype;
 
-import java.io.File;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.log4j.Logger;
-
 import azkaban.jobExecutor.JavaProcessJob;
 import azkaban.jobExecutor.Job;
 import azkaban.jobExecutor.NoopJob;
@@ -37,10 +27,16 @@ import azkaban.jobExecutor.utils.JobExecutionException;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
 import azkaban.utils.Utils;
+import java.io.File;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.log4j.Logger;
 
 public class JobTypeManager {
-  private final String jobTypePluginDir; // the dir for jobtype plugins
-  private final ClassLoader parentLoader;
 
   public static final String DEFAULT_JOBTYPEPLUGINDIR = "plugins/jobtypes";
   // need jars.to.include property, will be loaded with user property
@@ -52,12 +48,13 @@ public class JobTypeManager {
   // common private properties for multiple plugins
   private static final String COMMONSYSCONFFILE = "commonprivate.properties";
   private static final Logger logger = Logger.getLogger(JobTypeManager.class);
-
+  private final String jobTypePluginDir; // the dir for jobtype plugins
+  private final ClassLoader parentLoader;
+  private final Props globalProperties;
   private JobTypePluginSet pluginSet;
-  private Props globalProperties;
 
-  public JobTypeManager(String jobtypePluginDir, Props globalProperties,
-      ClassLoader parentClassLoader) {
+  public JobTypeManager(final String jobtypePluginDir, final Props globalProperties,
+      final ClassLoader parentClassLoader) {
     this.jobTypePluginDir = jobtypePluginDir;
     this.parentLoader = parentClassLoader;
     this.globalProperties = globalProperties;
@@ -66,18 +63,18 @@ public class JobTypeManager {
   }
 
   public void loadPlugins() throws JobTypeManagerException {
-    JobTypePluginSet plugins = new JobTypePluginSet();
+    final JobTypePluginSet plugins = new JobTypePluginSet();
 
     loadDefaultTypes(plugins);
-    if (jobTypePluginDir != null) {
-      File pluginDir = new File(jobTypePluginDir);
+    if (this.jobTypePluginDir != null) {
+      final File pluginDir = new File(this.jobTypePluginDir);
       if (pluginDir.exists()) {
         logger
             .info("Job type plugin directory set. Loading extra job types from "
                 + pluginDir);
         try {
           loadPluginJobTypes(plugins);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.info("Plugin jobtypes failed to load. " + e.getCause(), e);
           throw new JobTypeManagerException(e);
         }
@@ -86,11 +83,11 @@ public class JobTypeManager {
 
     // Swap the plugin set. If exception is thrown, then plugin isn't swapped.
     synchronized (this) {
-      pluginSet = plugins;
+      this.pluginSet = plugins;
     }
   }
 
-  private void loadDefaultTypes(JobTypePluginSet plugins)
+  private void loadDefaultTypes(final JobTypePluginSet plugins)
       throws JobTypeManagerException {
     logger.info("Loading plugin default job types");
     plugins.addPluginClass("command", ProcessJob.class);
@@ -102,31 +99,31 @@ public class JobTypeManager {
   }
 
   // load Job Types from jobtype plugin dir
-  private void loadPluginJobTypes(JobTypePluginSet plugins)
+  private void loadPluginJobTypes(final JobTypePluginSet plugins)
       throws JobTypeManagerException {
-    File jobPluginsDir = new File(jobTypePluginDir);
+    final File jobPluginsDir = new File(this.jobTypePluginDir);
 
     if (!jobPluginsDir.exists()) {
-      logger.error("Job type plugin dir " + jobTypePluginDir
+      logger.error("Job type plugin dir " + this.jobTypePluginDir
           + " doesn't exist. Will not load any external plugins.");
       return;
     } else if (!jobPluginsDir.isDirectory()) {
       throw new JobTypeManagerException("Job type plugin dir "
-          + jobTypePluginDir + " is not a directory!");
+          + this.jobTypePluginDir + " is not a directory!");
     } else if (!jobPluginsDir.canRead()) {
       throw new JobTypeManagerException("Job type plugin dir "
-          + jobTypePluginDir + " is not readable!");
+          + this.jobTypePluginDir + " is not readable!");
     }
 
     // Load the common properties used by all jobs that are run
     Props commonPluginJobProps = null;
-    File commonJobPropsFile = new File(jobPluginsDir, COMMONCONFFILE);
+    final File commonJobPropsFile = new File(jobPluginsDir, COMMONCONFFILE);
     if (commonJobPropsFile.exists()) {
       logger.info("Common plugin job props file " + commonJobPropsFile
           + " found. Attempt to load.");
       try {
-        commonPluginJobProps = new Props(globalProperties, commonJobPropsFile);
-      } catch (IOException e) {
+        commonPluginJobProps = new Props(this.globalProperties, commonJobPropsFile);
+      } catch (final IOException e) {
         throw new JobTypeManagerException(
             "Failed to load common plugin job properties" + e.getCause());
       }
@@ -138,13 +135,13 @@ public class JobTypeManager {
 
     // Loads the common properties used by all plugins when loading
     Props commonPluginLoadProps = null;
-    File commonLoadPropsFile = new File(jobPluginsDir, COMMONSYSCONFFILE);
+    final File commonLoadPropsFile = new File(jobPluginsDir, COMMONSYSCONFFILE);
     if (commonLoadPropsFile.exists()) {
       logger.info("Common plugin load props file " + commonLoadPropsFile
           + " found. Attempt to load.");
       try {
         commonPluginLoadProps = new Props(null, commonLoadPropsFile);
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new JobTypeManagerException(
             "Failed to load common plugin loader properties" + e.getCause());
       }
@@ -158,11 +155,11 @@ public class JobTypeManager {
     plugins.setCommonPluginLoadProps(commonPluginLoadProps);
 
     // Loading job types
-    for (File dir : jobPluginsDir.listFiles()) {
+    for (final File dir : jobPluginsDir.listFiles()) {
       if (dir.isDirectory() && dir.canRead()) {
         try {
           loadJobTypes(dir, plugins);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error(
               "Failed to load jobtype " + dir.getName() + e.getMessage(), e);
           throw new JobTypeManagerException(e);
@@ -171,18 +168,17 @@ public class JobTypeManager {
     }
   }
 
-  @SuppressWarnings("unchecked")
-  private void loadJobTypes(File pluginDir, JobTypePluginSet plugins)
+  private void loadJobTypes(final File pluginDir, final JobTypePluginSet plugins)
       throws JobTypeManagerException {
     // Directory is the jobtypeName
-    String jobTypeName = pluginDir.getName();
+    final String jobTypeName = pluginDir.getName();
     logger.info("Loading plugin " + jobTypeName);
 
     Props pluginJobProps = null;
     Props pluginLoadProps = null;
 
-    File pluginJobPropsFile = new File(pluginDir, JOBTYPECONFFILE);
-    File pluginLoadPropsFile = new File(pluginDir, JOBTYPESYSCONFFILE);
+    final File pluginJobPropsFile = new File(pluginDir, JOBTYPECONFFILE);
+    final File pluginLoadPropsFile = new File(pluginDir, JOBTYPESYSCONFFILE);
 
     if (!pluginLoadPropsFile.exists()) {
       logger.info("Plugin load props file " + pluginLoadPropsFile
@@ -191,8 +187,8 @@ public class JobTypeManager {
     }
 
     try {
-      Props commonPluginJobProps = plugins.getCommonPluginJobProps();
-      Props commonPluginLoadProps = plugins.getCommonPluginLoadProps();
+      final Props commonPluginJobProps = plugins.getCommonPluginJobProps();
+      final Props commonPluginLoadProps = plugins.getCommonPluginLoadProps();
       if (pluginJobPropsFile.exists()) {
         pluginJobProps = new Props(commonPluginJobProps, pluginJobPropsFile);
       } else {
@@ -202,7 +198,7 @@ public class JobTypeManager {
       pluginLoadProps = new Props(commonPluginLoadProps, pluginLoadPropsFile);
       pluginLoadProps.put("plugin.dir", pluginDir.getAbsolutePath());
       pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("pluginLoadProps to help with debugging: " + pluginLoadProps);
       throw new JobTypeManagerException("Failed to get jobtype properties"
           + e.getMessage(), e);
@@ -213,27 +209,26 @@ public class JobTypeManager {
       plugins.addPluginJobProps(jobTypeName, pluginJobProps);
     }
 
-    ClassLoader jobTypeLoader =
+    final ClassLoader jobTypeLoader =
         loadJobTypeClassLoader(pluginDir, jobTypeName, plugins);
-    String jobtypeClass = pluginLoadProps.get("jobtype.class");
+    final String jobtypeClass = pluginLoadProps.get("jobtype.class");
 
     Class<? extends Job> clazz = null;
     try {
       clazz = (Class<? extends Job>) jobTypeLoader.loadClass(jobtypeClass);
       plugins.addPluginClass(jobTypeName, clazz);
-    } catch (ClassNotFoundException e) {
+    } catch (final ClassNotFoundException e) {
       throw new JobTypeManagerException(e);
     }
 
     logger.info("Verifying job plugin " + jobTypeName);
     try {
-      Props fakeSysProps = new Props(pluginLoadProps);
-      Props fakeJobProps = new Props(pluginJobProps);
-      @SuppressWarnings("unused")
-      Job job =
+      final Props fakeSysProps = new Props(pluginLoadProps);
+      final Props fakeJobProps = new Props(pluginJobProps);
+      final Job job =
           (Job) Utils.callConstructor(clazz, "dummy", fakeSysProps,
               fakeJobProps, logger);
-    } catch (Throwable t) {
+    } catch (final Throwable t) {
       logger.info("Jobtype " + jobTypeName + " failed test!", t);
       throw new JobExecutionException(t);
     }
@@ -243,26 +238,21 @@ public class JobTypeManager {
 
   /**
    * Creates and loads all plugin resources (jars) into a ClassLoader
-   *
-   * @param pluginDir
-   * @param jobTypeName
-   * @param plugins
-   * @return
    */
-  private ClassLoader loadJobTypeClassLoader(File pluginDir,
-      String jobTypeName, JobTypePluginSet plugins) {
+  private ClassLoader loadJobTypeClassLoader(final File pluginDir,
+      final String jobTypeName, final JobTypePluginSet plugins) {
     // sysconf says what jars/confs to load
-    List<URL> resources = new ArrayList<URL>();
-    Props pluginLoadProps = plugins.getPluginLoaderProps(jobTypeName);
+    final List<URL> resources = new ArrayList<>();
+    final Props pluginLoadProps = plugins.getPluginLoaderProps(jobTypeName);
 
     try {
       // first global classpath
       logger.info("Adding global resources for " + jobTypeName);
-      List<String> typeGlobalClassPath =
+      final List<String> typeGlobalClassPath =
           pluginLoadProps.getStringList("jobtype.global.classpath", null, ",");
       if (typeGlobalClassPath != null) {
-        for (String jar : typeGlobalClassPath) {
-          URL cpItem = new File(jar).toURI().toURL();
+        for (final String jar : typeGlobalClassPath) {
+          final URL cpItem = new File(jar).toURI().toURL();
           if (!resources.contains(cpItem)) {
             logger.info("adding to classpath " + cpItem);
             resources.add(cpItem);
@@ -272,22 +262,22 @@ public class JobTypeManager {
 
       // type specific classpath
       logger.info("Adding type resources.");
-      List<String> typeClassPath =
+      final List<String> typeClassPath =
           pluginLoadProps.getStringList("jobtype.classpath", null, ",");
       if (typeClassPath != null) {
-        for (String jar : typeClassPath) {
-          URL cpItem = new File(jar).toURI().toURL();
+        for (final String jar : typeClassPath) {
+          final URL cpItem = new File(jar).toURI().toURL();
           if (!resources.contains(cpItem)) {
             logger.info("adding to classpath " + cpItem);
             resources.add(cpItem);
           }
         }
       }
-      List<String> jobtypeLibDirs =
+      final List<String> jobtypeLibDirs =
           pluginLoadProps.getStringList("jobtype.lib.dir", null, ",");
       if (jobtypeLibDirs != null) {
-        for (String libDir : jobtypeLibDirs) {
-          for (File f : new File(libDir).listFiles()) {
+        for (final String libDir : jobtypeLibDirs) {
+          for (final File f : new File(libDir).listFiles()) {
             if (f.getName().endsWith(".jar")) {
               resources.add(f.toURI().toURL());
               logger.info("adding to classpath " + f.toURI().toURL());
@@ -297,26 +287,28 @@ public class JobTypeManager {
       }
 
       logger.info("Adding type override resources.");
-      for (File f : pluginDir.listFiles()) {
+      for (final File f : pluginDir.listFiles()) {
         if (f.getName().endsWith(".jar")) {
           resources.add(f.toURI().toURL());
           logger.info("adding to classpath " + f.toURI().toURL());
         }
       }
 
-    } catch (MalformedURLException e) {
+    } catch (final MalformedURLException e) {
       throw new JobTypeManagerException(e);
     }
 
     // each job type can have a different class loader
-    logger.info(String.format("Classpath for plugin[dir: %s, JobType: %s]: %s", pluginDir, jobTypeName, resources));
-    ClassLoader jobTypeLoader =
+    logger.info(String
+        .format("Classpath for plugin[dir: %s, JobType: %s]: %s", pluginDir, jobTypeName,
+            resources));
+    final ClassLoader jobTypeLoader =
         new URLClassLoader(resources.toArray(new URL[resources.size()]),
-            parentLoader);
+            this.parentLoader);
     return jobTypeLoader;
   }
 
-  public Job buildJobExecutor(String jobId, Props jobProps, Logger logger)
+  public Job buildJobExecutor(final String jobId, Props jobProps, final Logger logger)
       throws JobTypeManagerException {
     // This is final because during build phase, you should never need to swap
     // the pluginSet for safety reasons
@@ -324,7 +316,7 @@ public class JobTypeManager {
 
     Job job = null;
     try {
-      String jobType = jobProps.getString("type");
+      final String jobType = jobProps.getString("type");
       if (jobType == null || jobType.length() == 0) {
         /* throw an exception when job name is null or empty */
         throw new JobExecutionException(String.format(
@@ -333,10 +325,10 @@ public class JobTypeManager {
 
       logger.info("Building " + jobType + " job executor. ");
 
-      Class<? extends Object> executorClass = pluginSet.getPluginClass(jobType);
+      final Class<? extends Object> executorClass = pluginSet.getPluginClass(jobType);
       if (executorClass == null) {
         throw new JobExecutionException(String.format("Job type '" + jobType
-            + "' is unrecognized. Could not construct job[%s] of type[%s].",
+                + "' is unrecognized. Could not construct job[%s] of type[%s].",
             jobProps, jobType));
       }
 
@@ -344,11 +336,11 @@ public class JobTypeManager {
       // For default jobtypes, even though they don't have pluginJobProps configured,
       // they still need to load properties from common.properties file if it's present
       // because common.properties file is global to all jobtypes.
-      if(pluginJobProps == null) {
+      if (pluginJobProps == null) {
         pluginJobProps = pluginSet.getCommonPluginJobProps();
       }
       if (pluginJobProps != null) {
-        for (String k : pluginJobProps.getKeySet()) {
+        for (final String k : pluginJobProps.getKeySet()) {
           if (!jobProps.containsKey(k)) {
             jobProps.put(k, pluginJobProps.get(k));
           }
@@ -363,19 +355,20 @@ public class JobTypeManager {
         // pluginSet.getCommonPluginLoadProps() will return null if there is no plugins directory.
         // hence assigning default Props() if that's the case
         pluginLoadProps = pluginSet.getCommonPluginLoadProps();
-        if(pluginLoadProps == null)
-        	pluginLoadProps = new Props();
+        if (pluginLoadProps == null) {
+          pluginLoadProps = new Props();
+        }
       }
 
       job =
           (Job) Utils.callConstructor(executorClass, jobId, pluginLoadProps,
               jobProps, logger);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("Failed to build job executor for job " + jobId
           + e.getMessage());
       throw new JobTypeManagerException("Failed to build job executor for job "
           + jobId, e);
-    } catch (Throwable t) {
+    } catch (final Throwable t) {
       logger.error(
           "Failed to build job executor for job " + jobId + t.getMessage(), t);
       throw new JobTypeManagerException("Failed to build job executor for job "
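
Editor's note: the classloader handling reformatted above follows one pattern worth calling out: loadJobTypeClassLoader collects jar URLs and wraps them in a per-plugin URLClassLoader parented to the application loader. Below is a minimal standalone sketch of that pattern, assuming a hypothetical plugin directory and class name ("plugins/jobtypes/example", "com.example.MyJob"); it is illustrative only and not code from this commit.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

public class PluginClassLoaderSketch {

  public static void main(final String[] args) throws Exception {
    // Hypothetical plugin directory and jobtype class; placeholders only.
    final File pluginDir = new File("plugins/jobtypes/example");
    final List<URL> resources = new ArrayList<>();
    final File[] files = pluginDir.listFiles();
    if (files != null) {
      for (final File f : files) {
        // Only jar files contribute to the plugin classpath.
        if (f.getName().endsWith(".jar")) {
          resources.add(f.toURI().toURL());
        }
      }
    }
    // Each plugin gets its own loader, parented to the application classloader.
    try (final URLClassLoader loader = new URLClassLoader(
        resources.toArray(new URL[resources.size()]),
        PluginClassLoaderSketch.class.getClassLoader())) {
      try {
        final Class<?> clazz = loader.loadClass("com.example.MyJob");
        System.out.println("Loaded " + clazz.getName());
      } catch (final ClassNotFoundException e) {
        System.out.println("Placeholder class not found: " + e.getMessage());
      }
    }
  }
}
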
diff --git a/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManagerException.java b/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManagerException.java
index d3cddb3..d841878 100644
--- a/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/jobtype/JobTypeManagerException.java
@@ -17,17 +17,18 @@
 package azkaban.jobtype;
 
 public class JobTypeManagerException extends RuntimeException {
+
   private static final long serialVersionUID = 1L;
 
-  public JobTypeManagerException(String message) {
+  public JobTypeManagerException(final String message) {
     super(message);
   }
 
-  public JobTypeManagerException(Throwable cause) {
+  public JobTypeManagerException(final Throwable cause) {
     super(cause);
   }
 
-  public JobTypeManagerException(String message, Throwable cause) {
+  public JobTypeManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/jobtype/JobTypePluginSet.java b/azkaban-common/src/main/java/azkaban/jobtype/JobTypePluginSet.java
index cb687dd..ab5cf93 100644
--- a/azkaban-common/src/main/java/azkaban/jobtype/JobTypePluginSet.java
+++ b/azkaban-common/src/main/java/azkaban/jobtype/JobTypePluginSet.java
@@ -15,11 +15,10 @@
  */
 package azkaban.jobtype;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import azkaban.jobExecutor.Job;
 import azkaban.utils.Props;
+import java.util.HashMap;
+import java.util.Map;
 
 /**
  * Container for job type plugins
@@ -31,9 +30,10 @@ import azkaban.utils.Props;
  * populated and controlled by the JobTypeManager
  */
 public class JobTypePluginSet {
-  private Map<String, Class<? extends Job>> jobToClass;
-  private Map<String, Props> pluginJobPropsMap;
-  private Map<String, Props> pluginLoadPropsMap;
+
+  private final Map<String, Class<? extends Job>> jobToClass;
+  private final Map<String, Props> pluginJobPropsMap;
+  private final Map<String, Props> pluginLoadPropsMap;
 
   private Props commonJobProps;
   private Props commonLoadProps;
@@ -42,110 +42,91 @@ public class JobTypePluginSet {
    * Base constructor
    */
   public JobTypePluginSet() {
-    jobToClass = new HashMap<String, Class<? extends Job>>();
-    pluginJobPropsMap = new HashMap<String, Props>();
-    pluginLoadPropsMap = new HashMap<String, Props>();
+    this.jobToClass = new HashMap<>();
+    this.pluginJobPropsMap = new HashMap<>();
+    this.pluginLoadPropsMap = new HashMap<>();
   }
 
   /**
    * Copy constructor
-   *
-   * @param clone
    */
-  public JobTypePluginSet(JobTypePluginSet clone) {
-    jobToClass = new HashMap<String, Class<? extends Job>>(clone.jobToClass);
-    pluginJobPropsMap = new HashMap<String, Props>(clone.pluginJobPropsMap);
-    pluginLoadPropsMap = new HashMap<String, Props>(clone.pluginLoadPropsMap);
-    commonJobProps = clone.commonJobProps;
-    commonLoadProps = clone.commonLoadProps;
+  public JobTypePluginSet(final JobTypePluginSet clone) {
+    this.jobToClass = new HashMap<>(clone.jobToClass);
+    this.pluginJobPropsMap = new HashMap<>(clone.pluginJobPropsMap);
+    this.pluginLoadPropsMap = new HashMap<>(clone.pluginLoadPropsMap);
+    this.commonJobProps = clone.commonJobProps;
+    this.commonLoadProps = clone.commonLoadProps;
   }
 
   /**
-   * Sets the common properties shared in every jobtype
-   *
-   * @param commonJobProps
+   * Gets common properties for every jobtype
    */
-  public void setCommonPluginJobProps(Props commonJobProps) {
-    this.commonJobProps = commonJobProps;
+  public Props getCommonPluginJobProps() {
+    return this.commonJobProps;
   }
 
   /**
-   * Sets the common properties used to load every plugin
-   *
-   * @param commonLoadProps
+   * Sets the common properties shared in every jobtype
    */
-  public void setCommonPluginLoadProps(Props commonLoadProps) {
-    this.commonLoadProps = commonLoadProps;
+  public void setCommonPluginJobProps(final Props commonJobProps) {
+    this.commonJobProps = commonJobProps;
   }
 
   /**
-   * Gets common properties for every jobtype
-   *
-   * @return
+   * Gets the common properties used to load a plugin
    */
-  public Props getCommonPluginJobProps() {
-    return commonJobProps;
+  public Props getCommonPluginLoadProps() {
+    return this.commonLoadProps;
   }
 
   /**
-   * Gets the common properties used to load a plugin
-   *
-   * @return
+   * Sets the common properties used to load every plugin
    */
-  public Props getCommonPluginLoadProps() {
-    return commonLoadProps;
+  public void setCommonPluginLoadProps(final Props commonLoadProps) {
+    this.commonLoadProps = commonLoadProps;
   }
 
   /**
    * Get the properties for a jobtype used to setup and load a plugin
-   *
-   * @param jobTypeName
-   * @return
    */
-  public Props getPluginLoaderProps(String jobTypeName) {
-    return pluginLoadPropsMap.get(jobTypeName);
+  public Props getPluginLoaderProps(final String jobTypeName) {
+    return this.pluginLoadPropsMap.get(jobTypeName);
   }
 
   /**
    * Get the properties that will be given to the plugin as default job
    * properties.
-   *
-   * @param jobTypeName
-   * @return
    */
-  public Props getPluginJobProps(String jobTypeName) {
-    return pluginJobPropsMap.get(jobTypeName);
+  public Props getPluginJobProps(final String jobTypeName) {
+    return this.pluginJobPropsMap.get(jobTypeName);
   }
 
   /**
    * Gets the plugin job runner class
-   *
-   * @param jobTypeName
-   * @return
    */
-  public Class<? extends Job> getPluginClass(String jobTypeName) {
-    return jobToClass.get(jobTypeName);
+  public Class<? extends Job> getPluginClass(final String jobTypeName) {
+    return this.jobToClass.get(jobTypeName);
   }
 
   /**
    * Adds plugin jobtype class
    */
-  public void addPluginClass(String jobTypeName,
-      Class<? extends Job> jobTypeClass) {
-    jobToClass.put(jobTypeName, jobTypeClass);
+  public void addPluginClass(final String jobTypeName,
+      final Class<? extends Job> jobTypeClass) {
+    this.jobToClass.put(jobTypeName, jobTypeClass);
   }
 
   /**
    * Adds plugin job properties used as default runtime properties
    */
-  public void addPluginJobProps(String jobTypeName, Props props) {
-    pluginJobPropsMap.put(jobTypeName, props);
+  public void addPluginJobProps(final String jobTypeName, final Props props) {
+    this.pluginJobPropsMap.put(jobTypeName, props);
   }
 
   /**
    * Adds plugin load properties used to load the plugin
    */
-  public void addPluginLoadProps(String jobTypeName, Props props) {
-    pluginLoadPropsMap.put(jobTypeName, props);
+  public void addPluginLoadProps(final String jobTypeName, final Props props) {
+    this.pluginLoadPropsMap.put(jobTypeName, props);
   }
 }
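
Editor's note: the final maps and copy constructor above support the reload pattern used by JobTypeManager.loadPlugins(): a complete plugin set is built off to the side and only then swapped in, so a failed load never leaves a half-populated registry. A rough generic sketch of that build-then-swap idea, assuming a plain String-to-String registry instead of the real Job classes:

import java.util.HashMap;
import java.util.Map;

public class SwapOnReloadSketch {

  // Current registry; replaced wholesale, never mutated in place by readers.
  private Map<String, String> registry = new HashMap<>();

  public void reload() {
    // Build the replacement completely before publishing it.
    final Map<String, String> fresh = new HashMap<>();
    fresh.put("command", "ProcessJob"); // illustrative entries only
    fresh.put("noop", "NoopJob");
    // If building had thrown above, the old registry would stay in place.
    synchronized (this) {
      this.registry = fresh;
    }
  }

  public synchronized String lookup(final String type) {
    return this.registry.get(type);
  }

  public static void main(final String[] args) {
    final SwapOnReloadSketch sketch = new SwapOnReloadSketch();
    sketch.reload();
    System.out.println(sketch.lookup("command"));
  }
}
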
diff --git a/azkaban-common/src/main/java/azkaban/metric/AbstractMetric.java b/azkaban-common/src/main/java/azkaban/metric/AbstractMetric.java
index 790336d..2427a4c 100644
--- a/azkaban-common/src/main/java/azkaban/metric/AbstractMetric.java
+++ b/azkaban-common/src/main/java/azkaban/metric/AbstractMetric.java
@@ -20,9 +20,11 @@ import org.apache.log4j.Logger;
 
 /**
  * Abstract class for Metric
+ *
  * @param <T> Type of Value of a given metric
  */
-public abstract class AbstractMetric<T> implements IMetric<T>, Cloneable{
+public abstract class AbstractMetric<T> implements IMetric<T>, Cloneable {
+
   protected static final Logger logger = Logger.getLogger(MetricReportManager.class);
   protected String name;
   protected T value;
@@ -35,58 +37,62 @@ public abstract class AbstractMetric<T> implements IMetric<T>, Cloneable{
    * @param initialValue Initial Value of a metric
    * @param manager Metric Manager whom a metric will report to
    */
-  protected AbstractMetric(String metricName, String metricType, T initialValue, MetricReportManager manager) {
-    name = metricName;
-    type = metricType;
-    value = initialValue;
-    metricManager = manager;
+  protected AbstractMetric(final String metricName, final String metricType, final T initialValue,
+      final MetricReportManager manager) {
+    this.name = metricName;
+    this.type = metricType;
+    this.value = initialValue;
+    this.metricManager = manager;
   }
 
   /**
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetric#getName()
    */
   @Override
   public String getName() {
-    return name;
+    return this.name;
   }
 
   /**
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetric#getValueType()
    */
   @Override
   public String getValueType() {
-    return type;
+    return this.type;
   }
 
   /**
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetric#updateMetricManager(azkaban.metric.MetricReportManager)
    */
   @Override
   public void updateMetricManager(final MetricReportManager manager) {
-    metricManager = manager;
+    this.metricManager = manager;
   }
 
   /**
    * {@inheritDoc}
-   * @throws CloneNotSupportedException
+   *
    * @see azkaban.metric.IMetric#getSnapshot()
    */
   @Override
-  @SuppressWarnings("unchecked")
-  public IMetric<T> getSnapshot() throws CloneNotSupportedException{
+  public IMetric<T> getSnapshot() throws CloneNotSupportedException {
     return (IMetric<T>) this.clone();
   }
 
   /**
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetric#getValue()
    */
   @Override
   public T getValue() {
-    return value;
+    return this.value;
   }
 
   /**
@@ -94,15 +100,17 @@ public abstract class AbstractMetric<T> implements IMetric<T>, Cloneable{
    * Metric is free to call this method as per implementation.
    * Timer based or Azkaban events are the most common implementation
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetric#notifyManager()
    */
   @Override
   public void notifyManager() {
     logger.debug(String.format("Notifying Manager for %s", this.getClass().getName()));
     try {
-      metricManager.reportMetric(this);
-    } catch (Throwable ex) {
-      logger.error(String.format("Metric Manager is not set for %s metric", this.getClass().getName()), ex);
+      this.metricManager.reportMetric(this);
+    } catch (final Throwable ex) {
+      logger.error(
+          String.format("Metric Manager is not set for %s metric", this.getClass().getName()), ex);
     }
   }
 }
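
Editor's note: getSnapshot() above relies on Object.clone() so that emitters receive a detached copy while the live metric keeps changing. A tiny self-contained sketch of that clone-based snapshot idea, independent of the real IMetric hierarchy:

public class SnapshotSketch implements Cloneable {

  private long value;

  public synchronized void set(final long v) {
    this.value = v;
  }

  public synchronized long getValue() {
    return this.value;
  }

  // Returns a detached copy; later updates to this object do not affect it.
  public synchronized SnapshotSketch snapshot() throws CloneNotSupportedException {
    return (SnapshotSketch) this.clone();
  }

  public static void main(final String[] args) throws CloneNotSupportedException {
    final SnapshotSketch live = new SnapshotSketch();
    live.set(1);
    final SnapshotSketch snap = live.snapshot();
    live.set(2);
    System.out.println(snap.getValue() + " vs " + live.getValue()); // prints "1 vs 2"
  }
}
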
diff --git a/azkaban-common/src/main/java/azkaban/metric/GangliaMetricEmitter.java b/azkaban-common/src/main/java/azkaban/metric/GangliaMetricEmitter.java
index 5e954b7..34fce3a 100644
--- a/azkaban-common/src/main/java/azkaban/metric/GangliaMetricEmitter.java
+++ b/azkaban-common/src/main/java/azkaban/metric/GangliaMetricEmitter.java
@@ -16,8 +16,6 @@
 
 package azkaban.metric;
 
-import org.apache.commons.collections.bag.SynchronizedBag;
-
 import azkaban.utils.Props;
 
 
@@ -25,23 +23,27 @@ import azkaban.utils.Props;
  * MetricEmitter implementation to report metric to a ganglia gmetric process
  */
 public class GangliaMetricEmitter implements IMetricEmitter {
+
   private static final String GANGLIA_METRIC_REPORTER_PATH = "azkaban.metric.ganglia.path";
-  private String gmetricPath;
+  private final String gmetricPath;
 
   /**
    * @param azkProps Azkaban Properties
    */
-  public GangliaMetricEmitter(Props azkProps) {
-    gmetricPath = azkProps.get(GANGLIA_METRIC_REPORTER_PATH);
+  public GangliaMetricEmitter(final Props azkProps) {
+    this.gmetricPath = azkProps.get(GANGLIA_METRIC_REPORTER_PATH);
   }
 
-  private String buildCommand(IMetric<?> metric) {
+  private String buildCommand(final IMetric<?> metric) {
     String cmd = null;
 
     synchronized (metric) {
       cmd =
-          String.format("%s -t %s -n %s -v %s", gmetricPath, metric.getValueType(), metric.getName(), metric.getValue()
-              .toString());
+          String
+              .format("%s -t %s -n %s -v %s", this.gmetricPath, metric.getValueType(),
+                  metric.getName(),
+                  metric.getValue()
+                      .toString());
     }
 
     return cmd;
@@ -50,22 +52,23 @@ public class GangliaMetricEmitter implements IMetricEmitter {
   /**
    * Report metric by executing command line interface of gmetrics
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetricEmitter#reportMetric(azkaban.metric.IMetric)
    */
   @Override
   public void reportMetric(final IMetric<?> metric) throws MetricException {
-    String gangliaCommand = buildCommand(metric);
+    final String gangliaCommand = buildCommand(metric);
 
     if (gangliaCommand != null) {
       // executes shell command to report metric to ganglia dashboard
       try {
-        Process emission = Runtime.getRuntime().exec(gangliaCommand);
-        int exitCode;
+        final Process emission = Runtime.getRuntime().exec(gangliaCommand);
+        final int exitCode;
         exitCode = emission.waitFor();
         if (exitCode != 0) {
           throw new MetricException("Failed to report metric using gmetric");
         }
-      } catch (Exception e) {
+      } catch (final Exception e) {
         throw new MetricException("Failed to report metric using gmetric");
       }
     } else {
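
Editor's note: the emitter above shells out to gmetric and treats a non-zero exit code as failure. A minimal sketch of that exec-and-check pattern follows; the gmetric path and metric values are assumptions for illustration, not values taken from Azkaban configuration.

import java.io.IOException;

public class GmetricExecSketch {

  public static void main(final String[] args) throws IOException, InterruptedException {
    // "/usr/bin/gmetric" is an assumed install location; adjust for your host.
    final String cmd = String.format("%s -t %s -n %s -v %s",
        "/usr/bin/gmetric", "uint32", "NumRunningFlows", "42");
    final Process emission = Runtime.getRuntime().exec(cmd);
    final int exitCode = emission.waitFor();
    if (exitCode != 0) {
      System.err.println("gmetric reported failure, exit code " + exitCode);
    }
  }
}
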
diff --git a/azkaban-common/src/main/java/azkaban/metric/IMetric.java b/azkaban-common/src/main/java/azkaban/metric/IMetric.java
index 188b399..e760b52 100644
--- a/azkaban-common/src/main/java/azkaban/metric/IMetric.java
+++ b/azkaban-common/src/main/java/azkaban/metric/IMetric.java
@@ -18,13 +18,20 @@ package azkaban.metric;
 
 /**
  * Interface of any Metric
+ *
  * @param <T> Type of Value of a given metric
  */
 public interface IMetric<T> {
+
   String getName();
+
   String getValueType();
+
   void updateMetricManager(final MetricReportManager manager);
+
   void notifyManager();
+
   T getValue();
+
   IMetric<T> getSnapshot() throws CloneNotSupportedException;
 }
diff --git a/azkaban-common/src/main/java/azkaban/metric/IMetricEmitter.java b/azkaban-common/src/main/java/azkaban/metric/IMetricEmitter.java
index bdc874b..3106957 100644
--- a/azkaban-common/src/main/java/azkaban/metric/IMetricEmitter.java
+++ b/azkaban-common/src/main/java/azkaban/metric/IMetricEmitter.java
@@ -20,6 +20,8 @@ package azkaban.metric;
  * Interface for metric emitters
  */
 public interface IMetricEmitter {
+
   void reportMetric(final IMetric<?> metric) throws MetricException;
+
   void purgeAllData() throws MetricException;
 }
\ No newline at end of file
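
Editor's note: the two-method contract above is small enough that a custom emitter is easy to sketch. The following logging emitter is illustrative only, written against the interface exactly as shown in this diff; it does not ship with Azkaban.

package azkaban.metric;

// Illustrative only: writes every reported metric to stdout and keeps no state.
public class StdoutMetricEmitter implements IMetricEmitter {

  @Override
  public void reportMetric(final IMetric<?> metric) throws MetricException {
    System.out.println(metric.getName() + "=" + metric.getValue());
  }

  @Override
  public void purgeAllData() throws MetricException {
    // Nothing is buffered, so there is nothing to purge.
  }
}
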
diff --git a/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryHistoryNode.java b/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryHistoryNode.java
index b8f1f6e..a988ecb 100644
--- a/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryHistoryNode.java
+++ b/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryHistoryNode.java
@@ -22,23 +22,23 @@ import java.util.Date;
  * A snapshot of metric's value
  */
 public class InMemoryHistoryNode {
-  private Object value;
-  private Date date;
+
+  private final Object value;
+  private final Date date;
 
   /**
    * Takes snapshot of the metric with a given value
-   * @param val
    */
   public InMemoryHistoryNode(final Object val) {
-    value = val;
-    date = new Date();
+    this.value = val;
+    this.date = new Date();
   }
 
   public Object getValue() {
-    return value;
+    return this.value;
   }
 
   public Date getTimestamp() {
-    return date;
+    return this.date;
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryMetricEmitter.java b/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryMetricEmitter.java
index e93d11b..66a66bf 100644
--- a/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryMetricEmitter.java
+++ b/azkaban-common/src/main/java/azkaban/metric/inmemoryemitter/InMemoryMetricEmitter.java
@@ -16,6 +16,10 @@
 
 package azkaban.metric.inmemoryemitter;
 
+import azkaban.metric.IMetric;
+import azkaban.metric.IMetricEmitter;
+import azkaban.metric.MetricException;
+import azkaban.utils.Props;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.Iterator;
@@ -23,15 +27,8 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
-
-import org.apache.log4j.Logger;
-
-import azkaban.metric.IMetric;
-import azkaban.metric.IMetricEmitter;
-import azkaban.metric.MetricException;
-import azkaban.utils.Props;
-
 import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
+import org.apache.log4j.Logger;
 
 
 /**
@@ -39,18 +36,17 @@ import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
  * This is also the default metric emitter and used by /stats servlet
  */
 public class InMemoryMetricEmitter implements IMetricEmitter {
-  protected static final Logger logger = Logger.getLogger(InMemoryMetricEmitter.class);
 
-  /**
-   * Data structure to keep track of snapshots
-   */
-  protected Map<String, LinkedList<InMemoryHistoryNode>> historyListMapping;
+  protected static final Logger logger = Logger.getLogger(InMemoryMetricEmitter.class);
   private static final String INMEMORY_METRIC_REPORTER_WINDOW = "azkaban.metric.inmemory.interval";
   private static final String INMEMORY_METRIC_NUM_INSTANCES = "azkaban.metric.inmemory.maxinstances";
   private static final String INMEMORY_METRIC_STANDARDDEVIATION_FACTOR =
       "azkaban.metric.inmemory.standardDeviationFactor";
-
-  private double standardDeviationFactor;
+  private final double standardDeviationFactor;
+  /**
+   * Data structure to keep track of snapshots
+   */
+  protected Map<String, LinkedList<InMemoryHistoryNode>> historyListMapping;
   /**
    * Interval (in millisecond) from today for which we should maintain the in memory snapshots
    */
@@ -63,64 +59,67 @@ public class InMemoryMetricEmitter implements IMetricEmitter {
   /**
    * @param azkProps Azkaban Properties
    */
-  public InMemoryMetricEmitter(Props azkProps) {
-    historyListMapping = new HashMap<String, LinkedList<InMemoryHistoryNode>>();
-    timeWindow = azkProps.getLong(INMEMORY_METRIC_REPORTER_WINDOW, 60 * 60 * 24 * 7 * 1000);
-    numInstances = azkProps.getLong(INMEMORY_METRIC_NUM_INSTANCES, 50);
-    standardDeviationFactor = azkProps.getDouble(INMEMORY_METRIC_STANDARDDEVIATION_FACTOR, 2);
+  public InMemoryMetricEmitter(final Props azkProps) {
+    this.historyListMapping = new HashMap<>();
+    this.timeWindow = azkProps.getLong(INMEMORY_METRIC_REPORTER_WINDOW, 60 * 60 * 24 * 7 * 1000);
+    this.numInstances = azkProps.getLong(INMEMORY_METRIC_NUM_INSTANCES, 50);
+    this.standardDeviationFactor = azkProps.getDouble(INMEMORY_METRIC_STANDARDDEVIATION_FACTOR, 2);
   }
 
   /**
    * Update reporting interval
+   *
    * @param val interval in milli seconds
    */
-  public synchronized void setReportingInterval(long val) {
-    timeWindow = val;
+  public synchronized void setReportingInterval(final long val) {
+    this.timeWindow = val;
   }
 
   /**
    * Set number of /stats servlet display points
-   * @param num
    */
-  public void setReportingInstances(long num) {
-    numInstances = num;
+  public void setReportingInstances(final long num) {
+    this.numInstances = num;
   }
 
   /**
    * Ingest metric in snapshot data structure while maintaining interval
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetricEmitter#reportMetric(azkaban.metric.IMetric)
    */
   @Override
   public void reportMetric(final IMetric<?> metric) throws MetricException {
-    String metricName = metric.getName();
-    if (!historyListMapping.containsKey(metricName)) {
+    final String metricName = metric.getName();
+    if (!this.historyListMapping.containsKey(metricName)) {
       logger.info("First time capturing metric: " + metricName);
-      historyListMapping.put(metricName, new LinkedList<InMemoryHistoryNode>());
+      this.historyListMapping.put(metricName, new LinkedList<>());
     }
-    synchronized (historyListMapping.get(metricName)) {
+    synchronized (this.historyListMapping.get(metricName)) {
       logger.debug("Ingesting metric: " + metricName);
-      historyListMapping.get(metricName).add(new InMemoryHistoryNode(metric.getValue()));
-      cleanUsingTime(metricName, historyListMapping.get(metricName).peekLast().getTimestamp());
+      this.historyListMapping.get(metricName).add(new InMemoryHistoryNode(metric.getValue()));
+      cleanUsingTime(metricName, this.historyListMapping.get(metricName).peekLast().getTimestamp());
     }
   }
 
   /**
    * Get snapshots for a given metric at a given time
+   *
    * @param metricName name of the metric
    * @param from Start date
    * @param to end date
    * @param useStats get statistically significant points only
    * @return List of snapshots
    */
-  public List<InMemoryHistoryNode> getMetrics(final String metricName, final Date from, final Date to,
+  public List<InMemoryHistoryNode> getMetrics(final String metricName, final Date from,
+      final Date to,
       final Boolean useStats) throws ClassCastException {
-    LinkedList<InMemoryHistoryNode> selectedLists = new LinkedList<InMemoryHistoryNode>();
-    if (historyListMapping.containsKey(metricName)) {
+    final LinkedList<InMemoryHistoryNode> selectedLists = new LinkedList<>();
+    if (this.historyListMapping.containsKey(metricName)) {
 
       logger.debug("selecting snapshots within time frame");
-      synchronized (historyListMapping.get(metricName)) {
-        for (InMemoryHistoryNode node : historyListMapping.get(metricName)) {
+      synchronized (this.historyListMapping.get(metricName)) {
+        for (final InMemoryHistoryNode node : this.historyListMapping.get(metricName)) {
           if (node.getTimestamp().after(from) && node.getTimestamp().before(to)) {
             selectedLists.add(node);
           }
@@ -143,30 +142,33 @@ public class InMemoryMetricEmitter implements IMetricEmitter {
 
   /**
    * filter snapshots using statistically significant points only
+   *
    * @param selectedLists list of snapshots
    */
   private void statBasedSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists)
       throws ClassCastException {
     logger.debug("selecting snapshots which are far away from mean value");
-    DescriptiveStatistics descStats = getDescriptiveStatistics(selectedLists);
-    Double mean = descStats.getMean();
-    Double std = descStats.getStandardDeviation();
+    final DescriptiveStatistics descStats = getDescriptiveStatistics(selectedLists);
+    final Double mean = descStats.getMean();
+    final Double std = descStats.getStandardDeviation();
 
-    Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
+    final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
     while (ite.hasNext()) {
-      InMemoryHistoryNode currentNode = ite.next();
-      double value = ((Number) currentNode.getValue()).doubleValue();
+      final InMemoryHistoryNode currentNode = ite.next();
+      final double value = ((Number) currentNode.getValue()).doubleValue();
       // remove all elements which lies in 95% value band
-      if (value < mean + standardDeviationFactor * std && value > mean - standardDeviationFactor * std) {
+      if (value < mean + this.standardDeviationFactor * std
+          && value > mean - this.standardDeviationFactor * std) {
         ite.remove();
       }
     }
   }
 
-  private DescriptiveStatistics getDescriptiveStatistics(final LinkedList<InMemoryHistoryNode> selectedLists)
+  private DescriptiveStatistics getDescriptiveStatistics(
+      final LinkedList<InMemoryHistoryNode> selectedLists)
       throws ClassCastException {
-    DescriptiveStatistics descStats = new DescriptiveStatistics();
-    for (InMemoryHistoryNode node : selectedLists) {
+    final DescriptiveStatistics descStats = new DescriptiveStatistics();
+    for (final InMemoryHistoryNode node : selectedLists) {
       descStats.addValue(((Number) node.getValue()).doubleValue());
     }
     return descStats;
@@ -174,14 +176,15 @@ public class InMemoryMetricEmitter implements IMetricEmitter {
 
   /**
    * filter snapshots by evenly selecting points across the interval
+   *
    * @param selectedLists list of snapshots
    */
   private void generalSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists) {
     logger.debug("selecting snapshots evenly from across the time interval");
-    if (selectedLists.size() > numInstances) {
-      double step = (double) selectedLists.size() / numInstances;
+    if (selectedLists.size() > this.numInstances) {
+      final double step = (double) selectedLists.size() / this.numInstances;
       long nextIndex = 0, currentIndex = 0, numSelectedInstances = 1;
-      Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
+      final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
       while (ite.hasNext()) {
         ite.next();
         if (currentIndex == nextIndex) {
@@ -197,27 +200,31 @@ public class InMemoryMetricEmitter implements IMetricEmitter {
 
   /**
    * Remove snapshots to maintain reporting interval
+   *
    * @param metricName Name of the metric
    * @param firstAllowedDate End date of the interval
    */
   private void cleanUsingTime(final String metricName, final Date firstAllowedDate) {
-    if (historyListMapping.containsKey(metricName) && historyListMapping.get(metricName) != null) {
-      synchronized (historyListMapping.get(metricName)) {
+    if (this.historyListMapping.containsKey(metricName)
+        && this.historyListMapping.get(metricName) != null) {
+      synchronized (this.historyListMapping.get(metricName)) {
 
-        InMemoryHistoryNode firstNode = historyListMapping.get(metricName).peekFirst();
+        InMemoryHistoryNode firstNode = this.historyListMapping.get(metricName).peekFirst();
         long localCopyOfTimeWindow = 0;
 
         // go ahead for clean up using latest possible value of interval
         // any interval change will not affect on going clean up
         synchronized (this) {
-          localCopyOfTimeWindow = timeWindow;
+          localCopyOfTimeWindow = this.timeWindow;
         }
 
         // removing objects older than Interval time from firstAllowedDate
         while (firstNode != null
-            && TimeUnit.MILLISECONDS.toMillis(firstAllowedDate.getTime() - firstNode.getTimestamp().getTime()) > localCopyOfTimeWindow) {
-          historyListMapping.get(metricName).removeFirst();
-          firstNode = historyListMapping.get(metricName).peekFirst();
+            && TimeUnit.MILLISECONDS
+            .toMillis(firstAllowedDate.getTime() - firstNode.getTimestamp().getTime())
+            > localCopyOfTimeWindow) {
+          this.historyListMapping.get(metricName).removeFirst();
+          firstNode = this.historyListMapping.get(metricName).peekFirst();
         }
       }
     }
@@ -226,10 +233,11 @@ public class InMemoryMetricEmitter implements IMetricEmitter {
   /**
    * Clear snapshot data structure
    * {@inheritDoc}
+   *
    * @see azkaban.metric.IMetricEmitter#purgeAllData()
    */
   @Override
   public void purgeAllData() throws MetricException {
-    historyListMapping.clear();
+    this.historyListMapping.clear();
   }
 }
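
Editor's note: statBasedSelectMetricHistory above keeps only points outside the band mean ± factor·stddev. A compact sketch of that filter using DescriptiveStatistics from commons-math3 (the same class this emitter imports), with plain doubles standing in for InMemoryHistoryNode values:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;

public class OutlierFilterSketch {

  public static void main(final String[] args) {
    final List<Double> samples =
        new ArrayList<>(Arrays.asList(10.0, 10.2, 9.9, 10.1, 25.0, 10.0));
    final double factor = 2.0; // same role as standardDeviationFactor

    final DescriptiveStatistics stats = new DescriptiveStatistics();
    for (final double v : samples) {
      stats.addValue(v);
    }
    final double mean = stats.getMean();
    final double std = stats.getStandardDeviation();

    // Drop everything inside the band; only statistically interesting points survive.
    final Iterator<Double> ite = samples.iterator();
    while (ite.hasNext()) {
      final double v = ite.next();
      if (v < mean + factor * std && v > mean - factor * std) {
        ite.remove();
      }
    }
    System.out.println(samples); // keeps the 25.0 outlier
  }
}
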
diff --git a/azkaban-common/src/main/java/azkaban/metric/MetricException.java b/azkaban-common/src/main/java/azkaban/metric/MetricException.java
index 6decb4a..83aed4a 100644
--- a/azkaban-common/src/main/java/azkaban/metric/MetricException.java
+++ b/azkaban-common/src/main/java/azkaban/metric/MetricException.java
@@ -20,17 +20,18 @@ package azkaban.metric;
  * Exception for Azkaban's Metric Component
  */
 public class MetricException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public MetricException(String message) {
+  public MetricException(final String message) {
     super(message);
   }
 
-  public MetricException(Throwable cause) {
+  public MetricException(final Throwable cause) {
     super(cause);
   }
 
-  public MetricException(String message, Throwable cause) {
+  public MetricException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/metric/MetricReportManager.java b/azkaban-common/src/main/java/azkaban/metric/MetricReportManager.java
index 25b735c..913e2e3 100644
--- a/azkaban-common/src/main/java/azkaban/metric/MetricReportManager.java
+++ b/azkaban-common/src/main/java/azkaban/metric/MetricReportManager.java
@@ -21,7 +21,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-
 import org.apache.log4j.Logger;
 
 
@@ -35,34 +34,33 @@ import org.apache.log4j.Logger;
  * </ul></p>
  */
 public class MetricReportManager {
+
   /**
    * Maximum number of metrics reporting threads
    */
   private static final int MAX_EMITTER_THREADS = 4;
   private static final Logger logger = Logger.getLogger(MetricReportManager.class);
-
+  // Singleton variable
+  private static volatile MetricReportManager instance = null;
+  private static volatile boolean isManagerEnabled;
   /**
    * List of all the metrics that Azkaban is tracking
    * Manager is not concerned with type of metric as long as it honors IMetric contracts
    */
-  private List<IMetric<?>> metrics;
-
+  private final List<IMetric<?>> metrics;
   /**
    * List of all the emitters listening to all the metrics.
    * The manager is not concerned with how an emitter reports a value.
    * The manager is only responsible for notifying all emitters whenever an IMetric wants to be notified
    */
-  private List<IMetricEmitter> metricEmitters;
-  private ExecutorService executorService;
-  // Singleton variable
-  private static volatile MetricReportManager instance = null;
-  private static volatile boolean isManagerEnabled;
+  private final List<IMetricEmitter> metricEmitters;
+  private final ExecutorService executorService;
 
   private MetricReportManager() {
     logger.debug("Instantiating Metric Manager");
-    executorService = Executors.newFixedThreadPool(MAX_EMITTER_THREADS);
-    metrics = new ArrayList<IMetric<?>>();
-    metricEmitters = new LinkedList<IMetricEmitter>();
+    this.executorService = Executors.newFixedThreadPool(MAX_EMITTER_THREADS);
+    this.metrics = new ArrayList<>();
+    this.metricEmitters = new LinkedList<>();
     enableManager();
   }
 
@@ -107,59 +105,61 @@ public class MetricReportManager {
         synchronized (metric) {
           metricSnapshot = metric.getSnapshot();
         }
-        logger.debug(String.format("Submitting %s metric for metric emission pool", metricSnapshot.getName()));
+        logger.debug(String
+            .format("Submitting %s metric for metric emission pool", metricSnapshot.getName()));
         // report to all emitters
-        for (final IMetricEmitter metricEmitter : metricEmitters) {
-          executorService.submit(() -> {
+        for (final IMetricEmitter metricEmitter : this.metricEmitters) {
+          this.executorService.submit(() -> {
             try {
               metricEmitter.reportMetric(metricSnapshot);
-            } catch (Exception ex) {
-              logger.error(String.format("Failed to report %s metric due to ", metricSnapshot.getName()), ex);
+            } catch (final Exception ex) {
+              logger.error(
+                  String.format("Failed to report %s metric due to ", metricSnapshot.getName()),
+                  ex);
             }
           });
         }
-      } catch (CloneNotSupportedException ex) {
-        logger.error(String.format("Failed to take snapshot for %s metric", metric.getClass().getName()), ex);
+      } catch (final CloneNotSupportedException ex) {
+        logger.error(
+            String.format("Failed to take snapshot for %s metric", metric.getClass().getName()),
+            ex);
       }
     }
   }
 
   /**
    * Add a metric emitter to report metric
-   * @param emitter
    */
   public void addMetricEmitter(final IMetricEmitter emitter) {
-    metricEmitters.add(emitter);
+    this.metricEmitters.add(emitter);
   }
 
   /**
    * remove a metric emitter
-   * @param emitter
    */
   public void removeMetricEmitter(final IMetricEmitter emitter) {
-    metricEmitters.remove(emitter);
+    this.metricEmitters.remove(emitter);
   }
 
   /**
    * Get all the metric emitters
-   * @return
    */
   public List<IMetricEmitter> getMetricEmitters() {
-    return metricEmitters;
+    return this.metricEmitters;
   }
 
   /**
    * Add a metric to be managed by Metric Manager
-   * @param metric
    */
   public void addMetric(final IMetric<?> metric) {
     // metric null or already present
-    if(metric == null)
+    if (metric == null) {
       throw new IllegalArgumentException("Cannot add a null metric");
+    }
 
     if (getMetricFromName(metric.getName()) == null) {
       logger.debug(String.format("Adding %s metric in Metric Manager", metric.getName()));
-      metrics.add(metric);
+      this.metrics.add(metric);
       metric.updateMetricManager(this);
     } else {
       logger.error("Failed to add metric");
@@ -168,13 +168,14 @@ public class MetricReportManager {
 
   /**
    * Get metric object for a given metric name
+   *
    * @param name metricName
    * @return metric Object, if found. Otherwise null.
    */
   public IMetric<?> getMetricFromName(final String name) {
     IMetric<?> metric = null;
     if (name != null) {
-      for (IMetric<?> currentMetric : metrics) {
+      for (final IMetric<?> currentMetric : this.metrics) {
         if (currentMetric.getName().equals(name)) {
           metric = currentMetric;
           break;
@@ -186,10 +187,9 @@ public class MetricReportManager {
 
   /**
    * Get all the emitters
-   * @return
    */
   public List<IMetric<?>> getAllMetrics() {
-    return metrics;
+    return this.metrics;
   }
 
   public void enableManager() {
@@ -204,10 +204,10 @@ public class MetricReportManager {
     logger.info("Disabling Metric Manager");
     if (isManagerEnabled) {
       isManagerEnabled = false;
-      for (IMetricEmitter emitter : metricEmitters) {
+      for (final IMetricEmitter emitter : this.metricEmitters) {
         try {
           emitter.purgeAllData();
-        } catch (MetricException ex) {
+        } catch (final MetricException ex) {
           logger.error("Failed to purge data ", ex);
         }
       }
@@ -217,10 +217,11 @@ public class MetricReportManager {
   /**
    * Shutdown execution service
    * {@inheritDoc}
+   *
    * @see java.lang.Object#finalize()
    */
   @Override
   protected void finalize() {
-    executorService.shutdown();
+    this.executorService.shutdown();
   }
 }
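
Editor's note: reportMetric above snapshots a metric once and fans the copy out to every emitter on a small thread pool, so one slow emitter cannot block the caller. A stripped-down sketch of that fan-out, with strings standing in for emitters and the cloned metric:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class EmitterFanOutSketch {

  public static void main(final String[] args) throws InterruptedException {
    final ExecutorService pool = Executors.newFixedThreadPool(4);
    final String snapshot = "NumRunningFlows=42"; // stands in for the cloned metric
    final List<String> emitters = Arrays.asList("in-memory", "ganglia");

    for (final String emitter : emitters) {
      // One task per emitter; failures are handled per task, not propagated.
      pool.submit(() -> {
        try {
          System.out.println(emitter + " <- " + snapshot);
        } catch (final Exception ex) {
          System.err.println("emitter " + emitter + " failed: " + ex);
        }
      });
    }
    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}
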
diff --git a/azkaban-common/src/main/java/azkaban/metric/TimeBasedReportingMetric.java b/azkaban-common/src/main/java/azkaban/metric/TimeBasedReportingMetric.java
index b932aec..55d03d1 100644
--- a/azkaban-common/src/main/java/azkaban/metric/TimeBasedReportingMetric.java
+++ b/azkaban-common/src/main/java/azkaban/metric/TimeBasedReportingMetric.java
@@ -21,12 +21,14 @@ import java.util.TimerTask;
 
 /**
  * Metrics tracked after every interval using timer
+ *
  * @param <T> Type of Value of a given metric
  */
 public abstract class TimeBasedReportingMetric<T> extends AbstractMetric<T> {
-  private Timer timer;
+
   protected long MAX_MILISEC_INTERVAL = 60 * 60 * 1000;
   protected long MIN_MILISEC_INTERVAL = 3 * 1000;
+  private Timer timer;
 
   /**
    * @param metricName Name of metric
@@ -34,25 +36,27 @@ public abstract class TimeBasedReportingMetric<T> extends AbstractMetric<T> {
    * @param initialValue Initial Value of a metric
    * @param manager Metric Manager whom a metric will report to
    * @param interval Time interval for metric tracking
-   * @throws MetricException
    */
-  public TimeBasedReportingMetric(String metricName, String metricType, T initialValue, MetricReportManager manager,
-      long interval) throws MetricException {
+  public TimeBasedReportingMetric(final String metricName, final String metricType,
+      final T initialValue,
+      final MetricReportManager manager,
+      final long interval) throws MetricException {
     super(metricName, metricType, initialValue, manager);
-    if(!isValidInterval(interval)) {
+    if (!isValidInterval(interval)) {
       throw new MetricException("Invalid interval: Cannot instantiate timer");
     }
-    timer = new Timer();
-    timer.schedule(getTimerTask(), interval, interval);
+    this.timer = new Timer();
+    this.timer.schedule(getTimerTask(), interval, interval);
   }
 
   /**
    * Get a TimerTask to reschedule Timer
+   *
    * @return An anonymous TimerTask class
    */
   private TimerTask getTimerTask() {
     final TimeBasedReportingMetric<T> lockObject = this;
-    TimerTask recurringReporting = new TimerTask() {
+    final TimerTask recurringReporting = new TimerTask() {
       @Override
       public void run() {
         synchronized (lockObject) {
@@ -67,21 +71,20 @@ public abstract class TimeBasedReportingMetric<T> extends AbstractMetric<T> {
 
   /**
    * Method to change tracking interval
-   * @param interval
-   * @throws MetricException
    */
   public void updateInterval(final long interval) throws MetricException {
-    if(!isValidInterval(interval)) {
+    if (!isValidInterval(interval)) {
       throw new MetricException("Invalid interval: Cannot update timer");
     }
-    logger.debug(String.format("Updating tracking interval to %d milisecond for %s metric", interval, getName()));
-    timer.cancel();
-    timer = new Timer();
-    timer.schedule(getTimerTask(), interval, interval);
+    logger.debug(String
+        .format("Updating tracking interval to %d milisecond for %s metric", interval, getName()));
+    this.timer.cancel();
+    this.timer = new Timer();
+    this.timer.schedule(getTimerTask(), interval, interval);
   }
 
   private boolean isValidInterval(final long interval) {
-    return interval >= MIN_MILISEC_INTERVAL && interval <= MAX_MILISEC_INTERVAL;
+    return interval >= this.MIN_MILISEC_INTERVAL && interval <= this.MAX_MILISEC_INTERVAL;
   }
 
   /**
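
Editor's note: updateInterval above reschedules by cancelling the old java.util.Timer and creating a fresh one, because a cancelled Timer cannot accept new tasks. A bare-bones sketch of that cancel-and-recreate move, with example intervals chosen arbitrarily:

import java.util.Timer;
import java.util.TimerTask;

public class RescheduleSketch {

  private Timer timer = new Timer();

  private TimerTask task() {
    // A fresh TimerTask is required for every schedule() call.
    return new TimerTask() {
      @Override
      public void run() {
        System.out.println("tick at " + System.currentTimeMillis());
      }
    };
  }

  public void start(final long intervalMs) {
    this.timer.schedule(task(), intervalMs, intervalMs);
  }

  // A cancelled Timer is dead, so rescheduling means replacing it outright.
  public void updateInterval(final long intervalMs) {
    this.timer.cancel();
    this.timer = new Timer();
    this.timer.schedule(task(), intervalMs, intervalMs);
  }

  public static void main(final String[] args) throws InterruptedException {
    final RescheduleSketch s = new RescheduleSketch();
    s.start(1000);
    Thread.sleep(2500);
    s.updateInterval(500);
    Thread.sleep(1500);
    s.timer.cancel();
  }
}
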
diff --git a/azkaban-common/src/main/java/azkaban/metrics/CommonMetrics.java b/azkaban-common/src/main/java/azkaban/metrics/CommonMetrics.java
index fcd4c64..5532c0a 100644
--- a/azkaban-common/src/main/java/azkaban/metrics/CommonMetrics.java
+++ b/azkaban-common/src/main/java/azkaban/metrics/CommonMetrics.java
@@ -18,7 +18,6 @@ package azkaban.metrics;
 
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
-
 import java.util.concurrent.atomic.AtomicLong;
 
 /**
@@ -29,22 +28,22 @@ import java.util.concurrent.atomic.AtomicLong;
 public enum CommonMetrics {
   INSTANCE;
 
+  private final AtomicLong dbConnectionTime = new AtomicLong(0L);
+  private final AtomicLong OOMWaitingJobCount = new AtomicLong(0L);
+  private final MetricRegistry registry;
   private Meter dbConnectionMeter;
   private Meter flowFailMeter;
-  private AtomicLong dbConnectionTime = new AtomicLong(0L);
-  private AtomicLong OOMWaitingJobCount = new AtomicLong(0L);
-  private MetricRegistry registry;
 
   CommonMetrics() {
-    registry = MetricsManager.INSTANCE.getRegistry();
+    this.registry = MetricsManager.INSTANCE.getRegistry();
     setupAllMetrics();
   }
 
   private void setupAllMetrics() {
-    dbConnectionMeter = MetricsUtility.addMeter("DB-Connection-meter", registry);
-    flowFailMeter = MetricsUtility.addMeter("flow-fail-meter", registry);
-    MetricsUtility.addGauge("OOM-waiting-job-count", registry, OOMWaitingJobCount::get);
-    MetricsUtility.addGauge("dbConnectionTime", registry, dbConnectionTime::get);
+    this.dbConnectionMeter = MetricsUtility.addMeter("DB-Connection-meter", this.registry);
+    this.flowFailMeter = MetricsUtility.addMeter("flow-fail-meter", this.registry);
+    MetricsUtility.addGauge("OOM-waiting-job-count", this.registry, this.OOMWaitingJobCount::get);
+    MetricsUtility.addGauge("dbConnectionTime", this.registry, this.dbConnectionTime::get);
   }
 
   /**
@@ -58,7 +57,7 @@ public enum CommonMetrics {
      * 1). drop wizard metrics deals with concurrency internally;
      * 2). mark is basically a math addition operation, which should not cause race condition issue.
      */
-    dbConnectionMeter.mark();
+    this.dbConnectionMeter.mark();
   }
 
   /**
@@ -66,26 +65,25 @@ public enum CommonMetrics {
    * This method could be called by Web Server or Executor, as they both detect flow failure.
    */
   public void markFlowFail() {
-    flowFailMeter.mark();
+    this.flowFailMeter.mark();
   }
 
-  public void setDBConnectionTime(long milliseconds) {
-    dbConnectionTime.set(milliseconds);
+  public void setDBConnectionTime(final long milliseconds) {
+    this.dbConnectionTime.set(milliseconds);
   }
 
   /**
    * Mark the occurrence of a job waiting event due to OOM
    */
   public void incrementOOMJobWaitCount() {
-    OOMWaitingJobCount.incrementAndGet();
+    this.OOMWaitingJobCount.incrementAndGet();
   }
 
   /**
    * Unmark the occurrence of a job waiting event due to OOM
    */
   public void decrementOOMJobWaitCount() {
-    OOMWaitingJobCount.decrementAndGet();
+    this.OOMWaitingJobCount.decrementAndGet();
   }
 
-
 }
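
Editor's note: the meters and gauges wired up in setupAllMetrics above come from the Dropwizard classes this enum already imports. A minimal sketch of the same registration pattern against the plain Dropwizard API, with metric names chosen for illustration only:

import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import java.util.concurrent.atomic.AtomicLong;

public class DropwizardRegistrationSketch {

  public static void main(final String[] args) {
    final MetricRegistry registry = new MetricRegistry();

    // Meters track event rates; mark() is the only write operation needed.
    final Meter dbConnections = registry.meter("db-connection-meter");
    dbConnections.mark();

    // Gauges read a value on demand; here an AtomicLong backs the reading.
    final AtomicLong waitingJobs = new AtomicLong(0L);
    registry.register("oom-waiting-job-count", (Gauge<Long>) waitingJobs::get);
    waitingJobs.incrementAndGet();

    System.out.println(registry.meter("db-connection-meter").getCount());
    System.out.println(registry.getGauges().get("oom-waiting-job-count").getValue());
  }
}
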
diff --git a/azkaban-common/src/main/java/azkaban/metrics/MetricsManager.java b/azkaban-common/src/main/java/azkaban/metrics/MetricsManager.java
index d7b1950..d978591 100644
--- a/azkaban-common/src/main/java/azkaban/metrics/MetricsManager.java
+++ b/azkaban-common/src/main/java/azkaban/metrics/MetricsManager.java
@@ -16,50 +16,48 @@
 
 package azkaban.metrics;
 
-import azkaban.utils.Props;
-import static azkaban.Constants.ConfigurationKeys.METRICS_SERVER_URL;
 import static azkaban.Constants.ConfigurationKeys.CUSTOM_METRICS_REPORTER_CLASS_NAME;
+import static azkaban.Constants.ConfigurationKeys.METRICS_SERVER_URL;
 
-import com.codahale.metrics.MetricRegistry;
+import azkaban.utils.Props;
 import com.codahale.metrics.ConsoleReporter;
-import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
+import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
+import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
 import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
-
-import org.apache.log4j.Logger;
-
 import java.lang.reflect.Constructor;
 import java.time.Duration;
 import java.util.concurrent.TimeUnit;
+import org.apache.log4j.Logger;
 
 /**
- * The singleton class, MetricsManager, is the place to have MetricRegistry and
- * ConsoleReporter in this class. Also, web servers and executors can call {@link #startReporting(String, Props)}
- * to start reporting AZ metrics to remote metrics server.
+ * The singleton class MetricsManager holds the MetricRegistry and the ConsoleReporter. Web
+ * servers and executors can call {@link #startReporting(String, Props)} to start reporting AZ
+ * metrics to a remote metrics server.
  */
 public enum MetricsManager {
   INSTANCE;
 
-  private final MetricRegistry registry        = new MetricRegistry();
-  private ConsoleReporter consoleReporter      = null;
   private static final Logger logger = Logger.getLogger(MetricsManager.class);
+  private final MetricRegistry registry = new MetricRegistry();
+  private ConsoleReporter consoleReporter = null;
 
   /**
    * Constructor is eagerly called when this class is loaded.
    */
   private MetricsManager() {
-    registry.register("MEMORY_Gauge", new MemoryUsageGaugeSet());
-    registry.register("GC_Gauge", new GarbageCollectorMetricSet());
-    registry.register("Thread_State_Gauge", new ThreadStatesGaugeSet());
+    this.registry.register("MEMORY_Gauge", new MemoryUsageGaugeSet());
+    this.registry.register("GC_Gauge", new GarbageCollectorMetricSet());
+    this.registry.register("Thread_State_Gauge", new ThreadStatesGaugeSet());
   }
+
   /**
    * Return the Metrics registry.
    *
-   * @return the single {@code MetricRegistry} used for all of Az Metrics
-   *         monitoring
+   * @return the single {@code MetricRegistry} used for all of Az Metrics monitoring
    */
   public MetricRegistry getRegistry() {
-    return registry;
+    return this.registry;
   }
 
   /**
@@ -67,19 +65,19 @@ public enum MetricsManager {
    * Note: this method must be synchronized, since both web server and executor
    * will call it during initialization.
    */
-  public synchronized void startReporting(String reporterName, Props props) {
-    String metricsReporterClassName = props.get(CUSTOM_METRICS_REPORTER_CLASS_NAME);
-    String metricsServerURL = props.get(METRICS_SERVER_URL);
+  public synchronized void startReporting(final String reporterName, final Props props) {
+    final String metricsReporterClassName = props.get(CUSTOM_METRICS_REPORTER_CLASS_NAME);
+    final String metricsServerURL = props.get(METRICS_SERVER_URL);
     if (metricsReporterClassName != null && metricsServerURL != null) {
       try {
         logger.info("metricsReporterClassName: " + metricsReporterClassName);
-        Class metricsClass = Class.forName(metricsReporterClassName);
+        final Class metricsClass = Class.forName(metricsReporterClassName);
 
-        Constructor[] constructors =
+        final Constructor[] constructors =
             metricsClass.getConstructors();
-        constructors[0].newInstance(reporterName, registry, metricsServerURL);
+        constructors[0].newInstance(reporterName, this.registry, metricsServerURL);
 
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error("Encountered error while loading and instantiating "
             + metricsReporterClassName, e);
         throw new IllegalStateException(
@@ -96,16 +94,16 @@ public enum MetricsManager {
 
   /**
    * Create a ConsoleReporter to the AZ Metrics registry.
-   * @param reportInterval
-   *            time to wait between dumping metrics to the console
+   *
+   * @param reportInterval time to wait between dumping metrics to the console
    */
-  public synchronized void addConsoleReporter(Duration reportInterval) {
-    if (null != consoleReporter) {
+  public synchronized void addConsoleReporter(final Duration reportInterval) {
+    if (null != this.consoleReporter) {
       return;
     }
 
-    consoleReporter = ConsoleReporter.forRegistry(getRegistry()).build();
-    consoleReporter.start(reportInterval.toMillis(), TimeUnit.MILLISECONDS);
+    this.consoleReporter = ConsoleReporter.forRegistry(getRegistry()).build();
+    this.consoleReporter.start(reportInterval.toMillis(), TimeUnit.MILLISECONDS);
   }
 
   /**
@@ -113,9 +111,10 @@ public enum MetricsManager {
    * {@link #addConsoleReporter(Duration)} and release it for GC.
    */
   public synchronized void removeConsoleReporter() {
-    if (null != consoleReporter)
-      consoleReporter.stop();
+    if (null != this.consoleReporter) {
+      this.consoleReporter.stop();
+    }
 
-    consoleReporter = null;
+    this.consoleReporter = null;
   }
 }
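
A short sketch of how a server component might drive MetricsManager after this change. The reporter name "AZ-WEB" and the no-arg Props constructor are assumptions; only methods shown in this diff are called.

import azkaban.metrics.MetricsManager;
import azkaban.utils.Props;
import java.time.Duration;

public class MetricsManagerUsageSketch {

  public static void startAndStopReporting() {
    // Dump every registered metric to stdout once a minute.
    MetricsManager.INSTANCE.addConsoleReporter(Duration.ofMinutes(1));

    // startReporting() only instantiates a remote reporter reflectively, with
    // (reporterName, MetricRegistry, metricsServerURL), when both the custom
    // reporter class name and the metrics server URL are present in the Props;
    // with an empty Props, no remote reporter is started.
    MetricsManager.INSTANCE.startReporting("AZ-WEB", new Props());

    // Later: stop the console reporter and release it for GC.
    MetricsManager.INSTANCE.removeConsoleReporter();
  }
}
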
diff --git a/azkaban-common/src/main/java/azkaban/metrics/MetricsUtility.java b/azkaban-common/src/main/java/azkaban/metrics/MetricsUtility.java
index 9fe15fa..5be2b7b 100644
--- a/azkaban-common/src/main/java/azkaban/metrics/MetricsUtility.java
+++ b/azkaban-common/src/main/java/azkaban/metrics/MetricsUtility.java
@@ -3,8 +3,6 @@ package azkaban.metrics;
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.Timer;
-
 import java.util.function.Supplier;
 
 /**
@@ -21,22 +19,22 @@ public final class MetricsUtility {
    * A {@link Meter} measures the rate of events over time (e.g., “requests per second”).
    * Here we also expose the meter's 15-minute moving average as a gauge.
    */
-  public static Meter addMeter(String name, MetricRegistry registry) {
-    Meter curr = registry.meter(name);
+  public static Meter addMeter(final String name, final MetricRegistry registry) {
+    final Meter curr = registry.meter(name);
     registry.register(name + "-gauge", (Gauge<Double>) curr::getFifteenMinuteRate);
     return curr;
   }
 
   /**
-   * A {@link Gauge} is an instantaneous reading of a particular value.
-   * This method leverages Supplier, a Functional Interface, to get Generics metrics values.
-   * With this support, no matter what our interesting metrics is a Double or a Long, we could pass it
-   * to Metrics Parser.
+   * A {@link Gauge} is an instantaneous reading of a particular value. This method leverages
+   * Supplier, a functional interface, to read generic metric values. With this support, no matter
+   * whether the metric of interest is a Double or a Long, we can pass it to the metrics parser.
    *
-   * E.g., in {@link CommonMetrics#setupAllMetrics()}, we construct a supplier lambda by having
-   * a AtomicLong object and its get method, in order to collect dbConnection metric.
+   * E.g., in {@link CommonMetrics#setupAllMetrics()}, we construct a supplier lambda from an
+   * AtomicLong object and its get method, in order to collect the dbConnection metric.
    */
-  public static <T> void addGauge(String name, MetricRegistry registry, Supplier<T> gaugeFunc) {
+  public static <T> void addGauge(final String name, final MetricRegistry registry,
+      final Supplier<T> gaugeFunc) {
     registry.register(name, (Gauge<T>) gaugeFunc::get);
   }
 }
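
A minimal sketch of the two helpers above, wiring an AtomicLong-backed gauge as the javadoc describes. The metric names and the sketch class are illustrative.

import azkaban.metrics.MetricsUtility;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import java.util.concurrent.atomic.AtomicLong;

public class MetricsUtilitySketch {

  public static void registerExamples() {
    final MetricRegistry registry = new MetricRegistry();

    // addMeter registers "upload-meter" plus a companion "upload-meter-gauge"
    // exposing the meter's 15-minute rate.
    final Meter uploads = MetricsUtility.addMeter("upload-meter", registry);
    uploads.mark();

    // addGauge wires a Supplier-backed gauge; the AtomicLong is read lazily each
    // time the registry is polled.
    final AtomicLong queueDepth = new AtomicLong();
    MetricsUtility.addGauge("queue-depth", registry, queueDepth::get);
    queueDepth.set(42L);
  }
}
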
diff --git a/azkaban-common/src/main/java/azkaban/project/DirectoryFlowLoader.java b/azkaban-common/src/main/java/azkaban/project/DirectoryFlowLoader.java
index 2295435..6055fea 100644
--- a/azkaban-common/src/main/java/azkaban/project/DirectoryFlowLoader.java
+++ b/azkaban-common/src/main/java/azkaban/project/DirectoryFlowLoader.java
@@ -16,20 +16,6 @@
 
 package azkaban.project;
 
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-
 import azkaban.flow.CommonJobProperties;
 import azkaban.flow.Edge;
 import azkaban.flow.Flow;
@@ -43,20 +29,33 @@ import azkaban.project.validator.XmlValidatorManager;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
 import azkaban.utils.Utils;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.log4j.Logger;
 
 public class DirectoryFlowLoader implements ProjectValidator {
-  private static final DirFilter DIR_FILTER = new DirFilter();
-  private static final String PROPERTY_SUFFIX = ".properties";
-  private static final String JOB_SUFFIX = ".job";
+
   public static final String JOB_MAX_XMS = "job.max.Xms";
   public static final String MAX_XMS_DEFAULT = "1G";
   public static final String JOB_MAX_XMX = "job.max.Xmx";
   public static final String MAX_XMX_DEFAULT = "2G";
+  private static final DirFilter DIR_FILTER = new DirFilter();
+  private static final String PROPERTY_SUFFIX = ".properties";
+  private static final String JOB_SUFFIX = ".job";
   private static final String XMS = "Xms";
   private static final String XMX = "Xmx";
 
   private final Logger logger;
-  private Props props;
+  private final Props props;
   private HashSet<String> rootNodes;
   private HashMap<String, Flow> flowMap;
   private HashMap<String, Node> nodeMap;
@@ -77,7 +76,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @param props Properties to add.
    * @param logger The Logger to use.
    */
-  public DirectoryFlowLoader(Props props, Logger logger) {
+  public DirectoryFlowLoader(final Props props, final Logger logger) {
     this.logger = logger;
     this.props = props;
   }
@@ -88,7 +87,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @return Map of flow name to Flow.
    */
   public Map<String, Flow> getFlowMap() {
-    return flowMap;
+    return this.flowMap;
   }
 
   /**
@@ -97,7 +96,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @return Set of error strings.
    */
   public Set<String> getErrors() {
-    return errors;
+    return this.errors;
   }
 
   /**
@@ -106,7 +105,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @return Map of job name to properties.
    */
   public Map<String, Props> getJobProps() {
-    return jobPropsMap;
+    return this.jobPropsMap;
   }
 
   /**
@@ -115,7 +114,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @return List of Props.
    */
   public List<Props> getProps() {
-    return propsList;
+    return this.propsList;
   }
 
   /**
@@ -124,17 +123,17 @@ public class DirectoryFlowLoader implements ProjectValidator {
    * @param project The project to load flows to.
    * @param baseDirectory The directory to load flows from.
    */
-  public void loadProjectFlow(Project project, File baseDirectory) {
-    propsList = new ArrayList<Props>();
-    flowPropsList = new ArrayList<FlowProps>();
-    jobPropsMap = new HashMap<String, Props>();
-    nodeMap = new HashMap<String, Node>();
-    flowMap = new HashMap<String, Flow>();
-    errors = new HashSet<String>();
-    duplicateJobs = new HashSet<String>();
-    nodeDependencies = new HashMap<String, Map<String, Edge>>();
-    rootNodes = new HashSet<String>();
-    flowDependencies = new HashMap<String, Set<String>>();
+  public void loadProjectFlow(final Project project, final File baseDirectory) {
+    this.propsList = new ArrayList<>();
+    this.flowPropsList = new ArrayList<>();
+    this.jobPropsMap = new HashMap<>();
+    this.nodeMap = new HashMap<>();
+    this.flowMap = new HashMap<>();
+    this.errors = new HashSet<>();
+    this.duplicateJobs = new HashSet<>();
+    this.nodeDependencies = new HashMap<>();
+    this.rootNodes = new HashSet<>();
+    this.flowDependencies = new HashMap<>();
 
     // Load all the props files and create the Node objects
     loadProjectFromDir(baseDirectory.getPath(), baseDirectory, null);
@@ -152,47 +151,47 @@ public class DirectoryFlowLoader implements ProjectValidator {
 
   }
 
-  private void loadProjectFromDir(String base, File dir, Props parent) {
-    File[] propertyFiles = dir.listFiles(new SuffixFilter(PROPERTY_SUFFIX));
+  private void loadProjectFromDir(final String base, final File dir, Props parent) {
+    final File[] propertyFiles = dir.listFiles(new SuffixFilter(PROPERTY_SUFFIX));
     Arrays.sort(propertyFiles);
 
-    for (File file : propertyFiles) {
-      String relative = getRelativeFilePath(base, file.getPath());
+    for (final File file : propertyFiles) {
+      final String relative = getRelativeFilePath(base, file.getPath());
       try {
         parent = new Props(parent, file);
         parent.setSource(relative);
 
-        FlowProps flowProps = new FlowProps(parent);
-        flowPropsList.add(flowProps);
-      } catch (IOException e) {
-        errors.add("Error loading properties " + file.getName() + ":"
+        final FlowProps flowProps = new FlowProps(parent);
+        this.flowPropsList.add(flowProps);
+      } catch (final IOException e) {
+        this.errors.add("Error loading properties " + file.getName() + ":"
             + e.getMessage());
       }
 
-      logger.info("Adding " + relative);
-      propsList.add(parent);
+      this.logger.info("Adding " + relative);
+      this.propsList.add(parent);
     }
 
     // Load all Job files. If there's a duplicate name, then we don't load
-    File[] jobFiles = dir.listFiles(new SuffixFilter(JOB_SUFFIX));
-    for (File file : jobFiles) {
-      String jobName = getNameWithoutExtension(file);
+    final File[] jobFiles = dir.listFiles(new SuffixFilter(JOB_SUFFIX));
+    for (final File file : jobFiles) {
+      final String jobName = getNameWithoutExtension(file);
       try {
-        if (!duplicateJobs.contains(jobName)) {
-          if (jobPropsMap.containsKey(jobName)) {
-            errors.add("Duplicate job names found '" + jobName + "'.");
-            duplicateJobs.add(jobName);
-            jobPropsMap.remove(jobName);
-            nodeMap.remove(jobName);
+        if (!this.duplicateJobs.contains(jobName)) {
+          if (this.jobPropsMap.containsKey(jobName)) {
+            this.errors.add("Duplicate job names found '" + jobName + "'.");
+            this.duplicateJobs.add(jobName);
+            this.jobPropsMap.remove(jobName);
+            this.nodeMap.remove(jobName);
           } else {
-            Props prop = new Props(parent, file);
-            String relative = getRelativeFilePath(base, file.getPath());
+            final Props prop = new Props(parent, file);
+            final String relative = getRelativeFilePath(base, file.getPath());
             prop.setSource(relative);
 
-            Node node = new Node(jobName);
-            String type = prop.getString("type", null);
+            final Node node = new Node(jobName);
+            final String type = prop.getString("type", null);
             if (type == null) {
-              errors.add("Job doesn't have type set '" + jobName + "'.");
+              this.errors.add("Job doesn't have type set '" + jobName + "'.");
             }
 
             node.setType(type);
@@ -204,46 +203,46 @@ public class DirectoryFlowLoader implements ProjectValidator {
 
             // Force root node
             if (prop.getBoolean(CommonJobProperties.ROOT_NODE, false)) {
-              rootNodes.add(jobName);
+              this.rootNodes.add(jobName);
             }
 
-            jobPropsMap.put(jobName, prop);
-            nodeMap.put(jobName, node);
+            this.jobPropsMap.put(jobName, prop);
+            this.nodeMap.put(jobName, node);
           }
         }
-      } catch (IOException e) {
-        errors.add("Error loading job file " + file.getName() + ":"
+      } catch (final IOException e) {
+        this.errors.add("Error loading job file " + file.getName() + ":"
             + e.getMessage());
       }
     }
 
-    File[] subDirs = dir.listFiles(DIR_FILTER);
-    for (File file : subDirs) {
+    final File[] subDirs = dir.listFiles(DIR_FILTER);
+    for (final File file : subDirs) {
       loadProjectFromDir(base, file, parent);
     }
   }
 
   private void resolveEmbeddedFlows() {
-    for (String flowId : flowDependencies.keySet()) {
-      HashSet<String> visited = new HashSet<String>();
+    for (final String flowId : this.flowDependencies.keySet()) {
+      final HashSet<String> visited = new HashSet<>();
       resolveEmbeddedFlow(flowId, visited);
     }
   }
 
-  private void resolveEmbeddedFlow(String flowId, Set<String> visited) {
-    Set<String> embeddedFlow = flowDependencies.get(flowId);
+  private void resolveEmbeddedFlow(final String flowId, final Set<String> visited) {
+    final Set<String> embeddedFlow = this.flowDependencies.get(flowId);
     if (embeddedFlow == null) {
       return;
     }
 
     visited.add(flowId);
-    for (String embeddedFlowId : embeddedFlow) {
+    for (final String embeddedFlowId : embeddedFlow) {
       if (visited.contains(embeddedFlowId)) {
-        errors.add("Embedded flow cycle found in " + flowId + "->"
+        this.errors.add("Embedded flow cycle found in " + flowId + "->"
             + embeddedFlowId);
         return;
-      } else if (!flowMap.containsKey(embeddedFlowId)) {
-        errors.add("Flow " + flowId + " depends on " + embeddedFlowId
+      } else if (!this.flowMap.containsKey(embeddedFlowId)) {
+        this.errors.add("Flow " + flowId + " depends on " + embeddedFlowId
             + " but can't be found.");
         return;
       } else {
@@ -257,22 +256,22 @@ public class DirectoryFlowLoader implements ProjectValidator {
   private void resolveDependencies() {
     // Add all the in edges and out edges. Catch bad dependencies and self
     // referrals. Also collect list of nodes who are parents.
-    for (Node node : nodeMap.values()) {
-      Props props = jobPropsMap.get(node.getId());
+    for (final Node node : this.nodeMap.values()) {
+      final Props props = this.jobPropsMap.get(node.getId());
 
       if (props == null) {
-        logger.error("Job props not found!! For some reason.");
+        this.logger.error("Job props not found!! For some reason.");
         continue;
       }
 
-      List<String> dependencyList =
+      final List<String> dependencyList =
           props.getStringList(CommonJobProperties.DEPENDENCIES,
               (List<String>) null);
 
       if (dependencyList != null) {
-        Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
+        Map<String, Edge> dependencies = this.nodeDependencies.get(node.getId());
         if (dependencies == null) {
-          dependencies = new HashMap<String, Edge>();
+          dependencies = new HashMap<>();
 
           for (String dependencyName : dependencyList) {
             dependencyName =
@@ -281,32 +280,32 @@ public class DirectoryFlowLoader implements ProjectValidator {
               continue;
             }
 
-            Edge edge = new Edge(dependencyName, node.getId());
-            Node dependencyNode = nodeMap.get(dependencyName);
+            final Edge edge = new Edge(dependencyName, node.getId());
+            final Node dependencyNode = this.nodeMap.get(dependencyName);
             if (dependencyNode == null) {
-              if (duplicateJobs.contains(dependencyName)) {
+              if (this.duplicateJobs.contains(dependencyName)) {
                 edge.setError("Ambiguous Dependency. Duplicates found.");
                 dependencies.put(dependencyName, edge);
-                errors.add(node.getId() + " has ambiguous dependency "
+                this.errors.add(node.getId() + " has ambiguous dependency "
                     + dependencyName);
               } else {
                 edge.setError("Dependency not found.");
                 dependencies.put(dependencyName, edge);
-                errors.add(node.getId() + " cannot find dependency "
+                this.errors.add(node.getId() + " cannot find dependency "
                     + dependencyName);
               }
             } else if (dependencyNode == node) {
               // We have a self cycle
               edge.setError("Self cycle found.");
               dependencies.put(dependencyName, edge);
-              errors.add(node.getId() + " has a self cycle");
+              this.errors.add(node.getId() + " has a self cycle");
             } else {
               dependencies.put(dependencyName, edge);
             }
           }
 
           if (!dependencies.isEmpty()) {
-            nodeDependencies.put(node.getId(), dependencies);
+            this.nodeDependencies.put(node.getId(), dependencies);
           }
         }
       }
@@ -315,44 +314,41 @@ public class DirectoryFlowLoader implements ProjectValidator {
 
   private void buildFlowsFromDependencies() {
     // Find all root nodes by finding ones without dependents.
-    HashSet<String> nonRootNodes = new HashSet<String>();
-    for (Map<String, Edge> edges : nodeDependencies.values()) {
-      for (String sourceId : edges.keySet()) {
+    final HashSet<String> nonRootNodes = new HashSet<>();
+    for (final Map<String, Edge> edges : this.nodeDependencies.values()) {
+      for (final String sourceId : edges.keySet()) {
         nonRootNodes.add(sourceId);
       }
     }
 
     // Now create flows. Bad flows are marked invalid
-    Set<String> visitedNodes = new HashSet<String>();
-    for (Node base : nodeMap.values()) {
+    final Set<String> visitedNodes = new HashSet<>();
+    for (final Node base : this.nodeMap.values()) {
       // Root nodes can be discovered when parsing jobs
-      if (rootNodes.contains(base.getId())
+      if (this.rootNodes.contains(base.getId())
           || !nonRootNodes.contains(base.getId())) {
-        rootNodes.add(base.getId());
-        Flow flow = new Flow(base.getId());
-        Props jobProp = jobPropsMap.get(base.getId());
+        this.rootNodes.add(base.getId());
+        final Flow flow = new Flow(base.getId());
+        final Props jobProp = this.jobPropsMap.get(base.getId());
 
         // Dedup with sets
-        @SuppressWarnings("unchecked")
-        List<String> successEmailList =
+        final List<String> successEmailList =
             jobProp.getStringList(CommonJobProperties.SUCCESS_EMAILS,
                 Collections.EMPTY_LIST);
-        Set<String> successEmail = new HashSet<String>();
-        for (String email : successEmailList) {
+        final Set<String> successEmail = new HashSet<>();
+        for (final String email : successEmailList) {
           successEmail.add(email.toLowerCase());
         }
 
-        @SuppressWarnings("unchecked")
-        List<String> failureEmailList =
+        final List<String> failureEmailList =
             jobProp.getStringList(CommonJobProperties.FAILURE_EMAILS,
                 Collections.EMPTY_LIST);
-        Set<String> failureEmail = new HashSet<String>();
-        for (String email : failureEmailList) {
+        final Set<String> failureEmail = new HashSet<>();
+        for (final String email : failureEmailList) {
           failureEmail.add(email.toLowerCase());
         }
 
-        @SuppressWarnings("unchecked")
-        List<String> notifyEmailList =
+        final List<String> notifyEmailList =
             jobProp.getStringList(CommonJobProperties.NOTIFY_EMAILS,
                 Collections.EMPTY_LIST);
         for (String email : notifyEmailList) {
@@ -364,32 +360,32 @@ public class DirectoryFlowLoader implements ProjectValidator {
         flow.addFailureEmails(failureEmail);
         flow.addSuccessEmails(successEmail);
 
-        flow.addAllFlowProperties(flowPropsList);
+        flow.addAllFlowProperties(this.flowPropsList);
         constructFlow(flow, base, visitedNodes);
         flow.initialize();
-        flowMap.put(base.getId(), flow);
+        this.flowMap.put(base.getId(), flow);
       }
     }
   }
 
-  private void constructFlow(Flow flow, Node node, Set<String> visited) {
+  private void constructFlow(final Flow flow, final Node node, final Set<String> visited) {
     visited.add(node.getId());
 
     flow.addNode(node);
     if (SpecialJobTypes.EMBEDDED_FLOW_TYPE.equals(node.getType())) {
-      Props props = jobPropsMap.get(node.getId());
-      String embeddedFlow = props.get(SpecialJobTypes.FLOW_NAME);
+      final Props props = this.jobPropsMap.get(node.getId());
+      final String embeddedFlow = props.get(SpecialJobTypes.FLOW_NAME);
 
-      Set<String> embeddedFlows = flowDependencies.get(flow.getId());
+      Set<String> embeddedFlows = this.flowDependencies.get(flow.getId());
       if (embeddedFlows == null) {
-        embeddedFlows = new HashSet<String>();
-        flowDependencies.put(flow.getId(), embeddedFlows);
+        embeddedFlows = new HashSet<>();
+        this.flowDependencies.put(flow.getId(), embeddedFlows);
       }
 
       node.setEmbeddedFlowId(embeddedFlow);
       embeddedFlows.add(embeddedFlow);
     }
-    Map<String, Edge> dependencies = nodeDependencies.get(node.getId());
+    final Map<String, Edge> dependencies = this.nodeDependencies.get(node.getId());
 
     if (dependencies != null) {
       for (Edge edge : dependencies.values()) {
@@ -399,12 +395,12 @@ public class DirectoryFlowLoader implements ProjectValidator {
           // We have a cycle. We set it as an error edge
           edge = new Edge(edge.getSourceId(), node.getId());
           edge.setError("Cyclical dependencies found.");
-          errors.add("Cyclical dependency found at " + edge.getId());
+          this.errors.add("Cyclical dependency found at " + edge.getId());
           flow.addEdge(edge);
         } else {
           // This should not be null
           flow.addEdge(edge);
-          Node sourceNode = nodeMap.get(edge.getSourceId());
+          final Node sourceNode = this.nodeMap.get(edge.getSourceId());
           constructFlow(flow, sourceNode, visited);
         }
       }
@@ -413,7 +409,7 @@ public class DirectoryFlowLoader implements ProjectValidator {
     visited.remove(node.getId());
   }
 
-  private void jobPropertiesCheck(Project project) {
+  private void jobPropertiesCheck(final Project project) {
     // if project is in the memory check whitelist, then we don't need to check
     // its memory settings
     if (ProjectWhitelist.isProjectWhitelisted(project.getId(),
@@ -421,83 +417,85 @@ public class DirectoryFlowLoader implements ProjectValidator {
       return;
     }
 
-    String maxXms = props.getString(JOB_MAX_XMS, MAX_XMS_DEFAULT);
-    String maxXmx = props.getString(JOB_MAX_XMX, MAX_XMX_DEFAULT);
-    long sizeMaxXms = Utils.parseMemString(maxXms);
-    long sizeMaxXmx = Utils.parseMemString(maxXmx);
+    final String maxXms = this.props.getString(JOB_MAX_XMS, MAX_XMS_DEFAULT);
+    final String maxXmx = this.props.getString(JOB_MAX_XMX, MAX_XMX_DEFAULT);
+    final long sizeMaxXms = Utils.parseMemString(maxXms);
+    final long sizeMaxXmx = Utils.parseMemString(maxXmx);
 
-    for (String jobName : jobPropsMap.keySet()) {
+    for (final String jobName : this.jobPropsMap.keySet()) {
 
-      Props jobProps = jobPropsMap.get(jobName);
-      String xms = jobProps.getString(XMS, null);
+      final Props jobProps = this.jobPropsMap.get(jobName);
+      final String xms = jobProps.getString(XMS, null);
       if (xms != null && !PropsUtils.isVarialbeReplacementPattern(xms)
           && Utils.parseMemString(xms) > sizeMaxXms) {
-        errors.add(String.format(
+        this.errors.add(String.format(
             "%s: Xms value has exceeded the allowed limit (max Xms = %s)",
             jobName, maxXms));
       }
-      String xmx = jobProps.getString(XMX, null);
+      final String xmx = jobProps.getString(XMX, null);
       if (xmx != null && !PropsUtils.isVarialbeReplacementPattern(xmx)
           && Utils.parseMemString(xmx) > sizeMaxXmx) {
-        errors.add(String.format(
+        this.errors.add(String.format(
             "%s: Xmx value has exceeded the allowed limit (max Xmx = %s)",
             jobName, maxXmx));
       }
 
       // job callback properties check
-      JobCallbackValidator.validate(jobName, props, jobProps, errors);
+      JobCallbackValidator.validate(jobName, this.props, jobProps, this.errors);
     }
   }
 
-  private String getNameWithoutExtension(File file) {
-    String filename = file.getName();
-    int index = filename.lastIndexOf('.');
+  private String getNameWithoutExtension(final File file) {
+    final String filename = file.getName();
+    final int index = filename.lastIndexOf('.');
 
     return index < 0 ? filename : filename.substring(0, index);
   }
 
-  private String getRelativeFilePath(String basePath, String filePath) {
+  private String getRelativeFilePath(final String basePath, final String filePath) {
     return filePath.substring(basePath.length() + 1);
   }
 
+  @Override
+  public boolean initialize(final Props configuration) {
+    return true;
+  }
+
+  @Override
+  public String getValidatorName() {
+    return XmlValidatorManager.DEFAULT_VALIDATOR_KEY;
+  }
+
+  @Override
+  public ValidationReport validateProject(final Project project, final File projectDir) {
+    loadProjectFlow(project, projectDir);
+    final ValidationReport report = new ValidationReport();
+    report.addErrorMsgs(this.errors);
+    return report;
+  }
+
   private static class DirFilter implements FileFilter {
+
     @Override
-    public boolean accept(File pathname) {
+    public boolean accept(final File pathname) {
       return pathname.isDirectory();
     }
   }
 
   private static class SuffixFilter implements FileFilter {
-    private String suffix;
 
-    public SuffixFilter(String suffix) {
+    private final String suffix;
+
+    public SuffixFilter(final String suffix) {
       this.suffix = suffix;
     }
 
     @Override
-    public boolean accept(File pathname) {
-      String name = pathname.getName();
+    public boolean accept(final File pathname) {
+      final String name = pathname.getName();
 
       return pathname.isFile() && !pathname.isHidden()
-          && name.length() > suffix.length() && name.endsWith(suffix);
+          && name.length() > this.suffix.length() && name.endsWith(this.suffix);
     }
   }
-
-  @Override
-  public boolean initialize(Props configuration) {
-    return true;
-  }
-
-  @Override
-  public String getValidatorName() {
-    return XmlValidatorManager.DEFAULT_VALIDATOR_KEY;
-  }
-
-  @Override
-  public ValidationReport validateProject(Project project, File projectDir) {
-    loadProjectFlow(project, projectDir);
-    ValidationReport report = new ValidationReport();
-    report.addErrorMsgs(errors);
-    return report;
-  }
 }
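
A sketch of driving the loader directly, assuming a Project instance obtained elsewhere and an empty Props (so the default job.max.Xms/Xmx limits apply); the sketch class and its console output are illustrative.

import azkaban.project.DirectoryFlowLoader;
import azkaban.project.Project;
import azkaban.utils.Props;
import java.io.File;
import org.apache.log4j.Logger;

public class FlowLoaderSketch {

  public static void loadAndReport(final Project project, final File projectDir) {
    // With an empty Props, job.max.Xms / job.max.Xmx default to 1G / 2G.
    final Props props = new Props();
    final DirectoryFlowLoader loader =
        new DirectoryFlowLoader(props, Logger.getLogger(FlowLoaderSketch.class));

    // Parses *.properties and *.job files, builds nodes and edges, resolves embedded
    // flows and dependencies, and runs the memory-settings check.
    loader.loadProjectFlow(project, projectDir);

    // Flows are keyed by their root job; errors collect duplicate jobs, missing
    // dependencies, cycles, and Xms/Xmx violations.
    loader.getFlowMap().keySet().forEach(name -> System.out.println("flow: " + name));
    loader.getErrors().forEach(System.out::println);
  }
}
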
diff --git a/azkaban-common/src/main/java/azkaban/project/JdbcProjectLoader.java b/azkaban-common/src/main/java/azkaban/project/JdbcProjectLoader.java
index c41c9c2..7bbd63f 100644
--- a/azkaban-common/src/main/java/azkaban/project/JdbcProjectLoader.java
+++ b/azkaban-common/src/main/java/azkaban/project/JdbcProjectLoader.java
@@ -16,6 +16,18 @@
 
 package azkaban.project;
 
+import azkaban.database.AbstractJdbcLoader;
+import azkaban.flow.Flow;
+import azkaban.project.ProjectLogEvent.EventType;
+import azkaban.user.Permission;
+import azkaban.user.User;
+import azkaban.utils.GZIPUtils;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Md5Hasher;
+import azkaban.utils.Pair;
+import azkaban.utils.Props;
+import azkaban.utils.PropsUtils;
+import azkaban.utils.Triple;
 import com.google.common.io.Files;
 import com.google.inject.Inject;
 import java.io.BufferedInputStream;
@@ -34,48 +46,35 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.commons.dbutils.DbUtils;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
-import azkaban.database.AbstractJdbcLoader;
-import azkaban.flow.Flow;
-import azkaban.project.ProjectLogEvent.EventType;
-import azkaban.user.Permission;
-import azkaban.user.User;
-import azkaban.utils.GZIPUtils;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Md5Hasher;
-import azkaban.utils.Pair;
-import azkaban.utils.Props;
-import azkaban.utils.PropsUtils;
-import azkaban.utils.Triple;
-
 public class JdbcProjectLoader extends AbstractJdbcLoader implements
     ProjectLoader {
+
   private static final Logger logger = Logger
       .getLogger(JdbcProjectLoader.class);
 
   private static final int CHUCK_SIZE = 1024 * 1024 * 10;
-  private File tempDir;
+  private final File tempDir;
 
   private EncodingType defaultEncodingType = EncodingType.GZIP;
 
   @Inject
-  public JdbcProjectLoader(Props props) {
+  public JdbcProjectLoader(final Props props) {
     super(props);
-    tempDir = new File(props.getString("project.temp.dir", "temp"));
-    if (!tempDir.exists()) {
-      tempDir.mkdirs();
+    this.tempDir = new File(props.getString("project.temp.dir", "temp"));
+    if (!this.tempDir.exists()) {
+      this.tempDir.mkdirs();
     }
   }
 
   @Override
   public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     List<Project> projects = null;
     try {
@@ -87,22 +86,22 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     return projects;
   }
 
-  private List<Project> fetchAllActiveProjects(Connection connection)
+  private List<Project> fetchAllActiveProjects(final Connection connection)
       throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
 
-    ProjectResultHandler handler = new ProjectResultHandler();
+    final ProjectResultHandler handler = new ProjectResultHandler();
     List<Project> projects = null;
     try {
       projects =
           runner.query(connection,
               ProjectResultHandler.SELECT_ALL_ACTIVE_PROJECTS, handler);
 
-      for (Project project : projects) {
-        List<Triple<String, Boolean, Permission>> permissions =
+      for (final Project project : projects) {
+        final List<Triple<String, Boolean, Permission>> permissions =
             fetchPermissionsForProject(connection, project);
 
-        for (Triple<String, Boolean, Permission> entry : permissions) {
+        for (final Triple<String, Boolean, Permission> entry : permissions) {
           if (entry.getSecond()) {
             project.setGroupPermission(entry.getFirst(), entry.getThird());
           } else {
@@ -110,7 +109,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
           }
         }
       }
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error retrieving all projects", e);
     } finally {
       DbUtils.closeQuietly(connection);
@@ -120,8 +119,8 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public Project fetchProjectById(int id) throws ProjectManagerException {
-    Connection connection = getConnection();
+  public Project fetchProjectById(final int id) throws ProjectManagerException {
+    final Connection connection = getConnection();
 
     Project project = null;
     try {
@@ -133,14 +132,14 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     return project;
   }
 
-  private Project fetchProjectById(Connection connection, int id)
+  private Project fetchProjectById(final Connection connection, final int id)
       throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     // Fetch the project
     Project project = null;
-    ProjectResultHandler handler = new ProjectResultHandler();
+    final ProjectResultHandler handler = new ProjectResultHandler();
     try {
-      List<Project> projects =
+      final List<Project> projects =
           runner.query(connection, ProjectResultHandler.SELECT_PROJECT_BY_ID,
               handler, id);
       if (projects.isEmpty()) {
@@ -149,17 +148,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       }
 
       project = projects.get(0);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(ProjectResultHandler.SELECT_PROJECT_BY_ID + " failed.");
       throw new ProjectManagerException(
           "Query for existing project failed. Project " + id, e);
     }
 
     // Fetch the user permissions
-    List<Triple<String, Boolean, Permission>> permissions =
+    final List<Triple<String, Boolean, Permission>> permissions =
         fetchPermissionsForProject(connection, project);
 
-    for (Triple<String, Boolean, Permission> perm : permissions) {
+    for (final Triple<String, Boolean, Permission> perm : permissions) {
       if (perm.getThird().toFlags() != 0) {
         if (perm.getSecond()) {
           project.setGroupPermission(perm.getFirst(), perm.getThird());
@@ -173,80 +172,80 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public Project fetchProjectByName(String name)
+  public Project fetchProjectByName(final String name)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     Project project = null;
     try {
-        project = fetchProjectByName(connection, name);
+      project = fetchProjectByName(connection, name);
     } finally {
-        DbUtils.closeQuietly(connection);
+      DbUtils.closeQuietly(connection);
     }
 
     return project;
   }
 
-  private Project fetchProjectByName(Connection connection, String name)
+  private Project fetchProjectByName(final Connection connection, final String name)
       throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     // Fetch the project
-    Project project;
-    ProjectResultHandler handler = new ProjectResultHandler();
+    final Project project;
+    final ProjectResultHandler handler = new ProjectResultHandler();
     // select active project from db first, if not exist, select inactive one.
     // At most one active project with the same name exists in db.
     try {
-        List<Project> projects =
+      List<Project> projects =
+          runner.query(connection,
+              ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name);
+      if (projects.isEmpty()) {
+        projects =
             runner.query(connection,
-                  ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name);
+                ProjectResultHandler.SELECT_PROJECT_BY_NAME, handler, name);
         if (projects.isEmpty()) {
-            projects =
-                runner.query(connection,
-                    ProjectResultHandler.SELECT_PROJECT_BY_NAME, handler, name);
-            if (projects.isEmpty()) {
-                throw new ProjectManagerException(
-                    "No project with name " + name + " exists in db.");
-            }
+          throw new ProjectManagerException(
+              "No project with name " + name + " exists in db.");
         }
-        project = projects.get(0);
-    } catch (SQLException e) {
-        logger.error(ProjectResultHandler.SELECT_PROJECT_BY_NAME
-            + " failed.");
-        throw new ProjectManagerException(
-            "Query for existing project failed. Project " + name, e);
+      }
+      project = projects.get(0);
+    } catch (final SQLException e) {
+      logger.error(ProjectResultHandler.SELECT_PROJECT_BY_NAME
+          + " failed.");
+      throw new ProjectManagerException(
+          "Query for existing project failed. Project " + name, e);
     }
 
     // Fetch the user permissions
-    List<Triple<String, Boolean, Permission>> permissions =
+    final List<Triple<String, Boolean, Permission>> permissions =
         fetchPermissionsForProject(connection, project);
 
-    for (Triple<String, Boolean, Permission> perm : permissions) {
-        if (perm.getThird().toFlags() != 0) {
-            if (perm.getSecond()) {
-                project
-                    .setGroupPermission(perm.getFirst(), perm.getThird());
-            } else {
-                project.setUserPermission(perm.getFirst(), perm.getThird());
-            }
+    for (final Triple<String, Boolean, Permission> perm : permissions) {
+      if (perm.getThird().toFlags() != 0) {
+        if (perm.getSecond()) {
+          project
+              .setGroupPermission(perm.getFirst(), perm.getThird());
+        } else {
+          project.setUserPermission(perm.getFirst(), perm.getThird());
         }
+      }
     }
 
     return project;
   }
 
   private List<Triple<String, Boolean, Permission>> fetchPermissionsForProject(
-      Connection connection, Project project) throws ProjectManagerException {
-    ProjectPermissionsResultHandler permHander =
+      final Connection connection, final Project project) throws ProjectManagerException {
+    final ProjectPermissionsResultHandler permHander =
         new ProjectPermissionsResultHandler();
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     List<Triple<String, Boolean, Permission>> permissions = null;
     try {
       permissions =
           runner.query(connection,
               ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION,
               permHander, project.getId());
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Query for permissions for "
           + project.getName() + " failed.", e);
     }
@@ -261,9 +260,9 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
    * or the SQL fails
    */
   @Override
-  public Project createNewProject(String name, String description, User creator)
+  public Project createNewProject(final String name, final String description, final User creator)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     Project project;
     try {
@@ -276,15 +275,15 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     return project;
   }
 
-  private synchronized Project createNewProject(Connection connection,
-      String name, String description, User creator)
+  private synchronized Project createNewProject(final Connection connection,
+      final String name, final String description, final User creator)
       throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
-    ProjectResultHandler handler = new ProjectResultHandler();
+    final QueryRunner runner = new QueryRunner();
+    final ProjectResultHandler handler = new ProjectResultHandler();
 
     // See if it exists first.
     try {
-      List<Project> project =
+      final List<Project> project =
           runner
               .query(connection,
                   ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler,
@@ -293,7 +292,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
         throw new ProjectManagerException("Active project with name " + name
             + " already exists in db.");
       }
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException(
           "Checking for existing project failed. " + name, e);
@@ -303,21 +302,21 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
         "INSERT INTO projects ( name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob) values (?,?,?,?,?,?,?,?,?)";
     // Insert project
     try {
-      long time = System.currentTimeMillis();
-      int i =
+      final long time = System.currentTimeMillis();
+      final int i =
           runner.update(connection, INSERT_PROJECT, name, true, time, time,
               null, creator.getUserId(), description,
-              defaultEncodingType.getNumVal(), null);
+              this.defaultEncodingType.getNumVal(), null);
       if (i == 0) {
         throw new ProjectManagerException("No projects have been inserted.");
       }
       connection.commit();
 
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(INSERT_PROJECT + " failed.");
       try {
         connection.rollback();
-      } catch (SQLException e1) {
+      } catch (final SQLException e1) {
         e1.printStackTrace();
       }
       throw new ProjectManagerException(
@@ -327,7 +326,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     // Do another query to grab and return the project.
     Project project = null;
     try {
-      List<Project> projects =
+      final List<Project> projects =
           runner
               .query(connection,
                   ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler,
@@ -341,7 +340,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       }
 
       project = projects.get(0);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException(
           "Checking for existing project failed. " + name, e);
@@ -351,12 +350,13 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void uploadProjectFile(int projectId, int version, File localFile, String uploader)
+  public void uploadProjectFile(final int projectId, final int version, final File localFile,
+      final String uploader)
       throws ProjectManagerException {
-    long startMs = System.currentTimeMillis();
+    final long startMs = System.currentTimeMillis();
     logger.info(String.format("Uploading Project ID: %d file: %s [%d bytes]",
         projectId, localFile.getName(), localFile.length()));
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
       /* Update DB with new project info */
@@ -366,10 +366,10 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       uploadProjectFile(connection, projectId, version, localFile);
 
       connection.commit();
-      long duration = (System.currentTimeMillis() - startMs) / 1000;
+      final long duration = (System.currentTimeMillis() - startMs) / 1000;
       logger.info(String.format("Uploaded Project ID: %d file: %s [%d bytes] in %d sec",
           projectId, localFile.getName(), localFile.length(), duration));
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException("Error getting DB connection.", e);
     } finally {
@@ -377,10 +377,11 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     }
   }
 
-  private void uploadProjectFile(Connection connection, int projectId, int version, File localFile)
+  private void uploadProjectFile(final Connection connection, final int projectId,
+      final int version, final File localFile)
       throws ProjectManagerException {
     /* Step 1: Upload File in chunks to DB */
-    int chunks = uploadFileInChunks(connection, projectId, version, localFile);
+    final int chunks = uploadFileInChunks(connection, projectId, version, localFile);
 
     /* Step 2: Update number of chunks in DB */
     updateChunksInProjectVersions(connection, projectId, version, chunks);
@@ -388,50 +389,51 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   @Override
   public void addProjectVersion(
-      int projectId,
-      int version,
-      File localFile,
-      String uploader,
-      byte[] md5,
-      String resourceId) throws ProjectManagerException {
+      final int projectId,
+      final int version,
+      final File localFile,
+      final String uploader,
+      final byte[] md5,
+      final String resourceId) throws ProjectManagerException {
     try (Connection connection = getConnection()) {
-      addProjectToProjectVersions(connection, projectId, version, localFile, uploader, md5, resourceId);
+      addProjectToProjectVersions(connection, projectId, version, localFile, uploader, md5,
+          resourceId);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
-      throw new ProjectManagerException(String.format("Add ProjectVersion failed. project id: %d version: %d",
-          projectId, version), e);
+      throw new ProjectManagerException(
+          String.format("Add ProjectVersion failed. project id: %d version: %d",
+              projectId, version), e);
     }
   }
 
   /**
    * Insert a new version record to TABLE project_versions before uploading files.
    *
-   * The reason for this operation:
-   * When error chunking happens in remote mysql server, incomplete file data remains
-   * in DB, and an SQL exception is thrown. If we don't have this operation before uploading file,
-   * the SQL exception prevents AZ from creating the new version record in Table project_versions.
-   * However, the Table project_files still reserve the incomplete files, which causes troubles
-   * when uploading a new file: Since the version in TABLE project_versions is still old, mysql will stop
-   * inserting new files to db.
+   * The reason for this operation: when a chunking error happens on the remote MySQL server,
+   * incomplete file data remains in the DB and an SQL exception is thrown. Without this operation
+   * before uploading the file, the SQL exception prevents AZ from creating the new version record
+   * in table project_versions. However, table project_files still retains the incomplete chunks,
+   * which causes trouble when uploading a new file: since the version in table project_versions is
+   * still old, MySQL will stop inserting new files into the DB.
    *
-   * Why this operation is safe:
-   * When AZ uploads a new zip file, it always fetches the latest version proj_v from TABLE project_version,
-   * proj_v+1 will be used as the new version for the uploading files.
+   * Why this operation is safe: when AZ uploads a new zip file, it always fetches the latest
+   * version proj_v from table project_versions, and proj_v+1 is used as the new version for the
+   * uploaded files.
    *
-   * Assume error chunking happens on day 1. proj_v is created for this bad file (old file version + 1).
-   * When we upload a new project zip in day2, new file in day 2 will use the new version (proj_v + 1).
-   * When file uploading completes, AZ will clean all old chunks in DB afterward.
+   * Assume a chunking error happens on day 1: proj_v is created for this bad file (old file
+   * version + 1). When we upload a new project zip on day 2, the new file uses the new version
+   * (proj_v + 1). When the file upload completes, AZ cleans up all old chunks in the DB afterward.
    */
-  private void addProjectToProjectVersions(Connection connection,
-      int projectId,
-      int version,
-      File localFile,
-      String uploader,
-      byte[] md5,
-      String resourceId) throws ProjectManagerException {
+  private void addProjectToProjectVersions(final Connection connection,
+      final int projectId,
+      final int version,
+      final File localFile,
+      final String uploader,
+      final byte[] md5,
+      final String resourceId) throws ProjectManagerException {
     final long updateTime = System.currentTimeMillis();
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
 
     final String INSERT_PROJECT_VERSION = "INSERT INTO project_versions "
         + "(project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks, resource_id) values "
@@ -453,19 +455,20 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
           md5,
           0,
           resourceId);
-    } catch (SQLException e) {
-      String msg = String.format("Error initializing project id: %d version: %d ", projectId, version);
+    } catch (final SQLException e) {
+      final String msg = String
+          .format("Error initializing project id: %d version: %d ", projectId, version);
       logger.error(msg, e);
       throw new ProjectManagerException(msg, e);
     }
   }
 
-  private byte[] computeHash(File localFile) {
+  private byte[] computeHash(final File localFile) {
     logger.info("Creating message digest for upload " + localFile.getName());
-    byte[] md5;
+    final byte[] md5;
     try {
       md5 = Md5Hasher.md5Hash(localFile);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Error getting md5 hash.", e);
     }
 
@@ -473,12 +476,13 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     return md5;
   }
 
-  private int uploadFileInChunks(Connection connection, int projectId, int version, File localFile)
+  private int uploadFileInChunks(final Connection connection, final int projectId,
+      final int version, final File localFile)
       throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
 
     // Really... I doubt we'll get a > 2gig file. So int casting it is!
-    byte[] buffer = new byte[CHUCK_SIZE];
+    final byte[] buffer = new byte[CHUCK_SIZE];
     final String INSERT_PROJECT_FILES =
         "INSERT INTO project_files (project_id, version, chunk, size, file) values (?,?,?,?,?)";
 
@@ -506,14 +510,14 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
            */
           connection.commit();
           logger.info("Finished update for " + localFile.getName() + " chunk " + chunk);
-        } catch (SQLException e) {
+        } catch (final SQLException e) {
           throw new ProjectManagerException("Error Chunking during uploading files to db...");
         }
         ++chunk;
 
         size = bufferedStream.read(buffer);
       }
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException(String.format(
           "Error chunking file. projectId: %d, version: %d, file:%s[%d bytes], chunk: %d",
           projectId, version, localFile.getName(), localFile.length(), chunk));
@@ -526,26 +530,29 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   /**
    * we update num_chunks's actual number to db here.
    */
-  private void updateChunksInProjectVersions(Connection connection, int projectId, int version, int chunk)
+  private void updateChunksInProjectVersions(final Connection connection, final int projectId,
+      final int version,
+      final int chunk)
       throws ProjectManagerException {
 
     final String UPDATE_PROJECT_NUM_CHUNKS =
         "UPDATE project_versions SET num_chunks=? WHERE project_id=? AND version=?";
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, UPDATE_PROJECT_NUM_CHUNKS, chunk, projectId, version);
       connection.commit();
-    } catch (SQLException e) {
-      throw new ProjectManagerException("Error updating project " + projectId + " : chunk_num " + chunk, e);
+    } catch (final SQLException e) {
+      throw new ProjectManagerException(
+          "Error updating project " + projectId + " : chunk_num " + chunk, e);
     }
   }
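
A sketch of the public upload/fetch path described above, assuming a Props instance that carries the DB settings; the uploader name and the sketch class are illustrative, and only methods visible in this diff are called.

import azkaban.project.JdbcProjectLoader;
import azkaban.project.ProjectFileHandler;
import azkaban.project.ProjectManagerException;
import azkaban.utils.Props;
import java.io.File;

public class ProjectUploadSketch {

  public static void roundTrip(final Props dbProps, final int projectId, final int version,
      final File zipFile) throws ProjectManagerException {
    // "project.temp.dir" controls where fetched chunks are reassembled (defaults to "temp").
    final JdbcProjectLoader loader = new JdbcProjectLoader(dbProps);

    // Streams the zip into project_files in 10 MB chunks, then records the final
    // chunk count back into project_versions.
    loader.uploadProjectFile(projectId, version, zipFile, "azkaban-admin");

    // Reassembles the chunks into a temp file and returns the version's metadata.
    final ProjectFileHandler handler = loader.getUploadedFile(projectId, version);
    System.out.println("fetched " + handler.getFileName()
        + " in " + handler.getNumChunks() + " chunk(s)");
  }
}
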
 
   @Override
-  public ProjectFileHandler getUploadedFile(int projectId, int version)
+  public ProjectFileHandler getUploadedFile(final int projectId, final int version)
       throws ProjectManagerException {
     logger.info("Retrieving to " + projectId + " version:" + version);
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     ProjectFileHandler handler = null;
     try {
       handler = getUploadedFile(connection, projectId, version);
@@ -557,46 +564,48 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public ProjectFileHandler fetchProjectMetaData(int projectId, int version) {
-    ProjectVersionResultHandler pfHandler = new ProjectVersionResultHandler();
+  public ProjectFileHandler fetchProjectMetaData(final int projectId, final int version) {
+    final ProjectVersionResultHandler pfHandler = new ProjectVersionResultHandler();
 
     try (Connection connection = getConnection()) {
-      List<ProjectFileHandler> projectFiles = new QueryRunner().query(connection,
+      final List<ProjectFileHandler> projectFiles = new QueryRunner().query(connection,
           ProjectVersionResultHandler.SELECT_PROJECT_VERSION, pfHandler, projectId, version);
       if (projectFiles == null || projectFiles.isEmpty()) {
         return null;
       }
       return projectFiles.get(0);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
-      throw new ProjectManagerException("Query for uploaded file for project id " + projectId + " failed.", e);
+      throw new ProjectManagerException(
+          "Query for uploaded file for project id " + projectId + " failed.", e);
     }
   }
 
-  private ProjectFileHandler getUploadedFile(Connection connection,
-      int projectId, int version) throws ProjectManagerException {
-    ProjectFileHandler projHandler = fetchProjectMetaData(projectId, version);
+  private ProjectFileHandler getUploadedFile(final Connection connection,
+      final int projectId, final int version) throws ProjectManagerException {
+    final ProjectFileHandler projHandler = fetchProjectMetaData(projectId, version);
     if (projHandler == null) {
       return null;
     }
-    int numChunks = projHandler.getNumChunks();
+    final int numChunks = projHandler.getNumChunks();
     BufferedOutputStream bStream = null;
     File file;
     try {
       try {
-        file = File.createTempFile(projHandler.getFileName(), String.valueOf(version), tempDir);
+        file = File
+            .createTempFile(projHandler.getFileName(), String.valueOf(version), this.tempDir);
         bStream = new BufferedOutputStream(new FileOutputStream(file));
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new ProjectManagerException(
             "Error creating temp file for stream.");
       }
 
-      QueryRunner runner = new QueryRunner();
-      int collect = 5;
+      final QueryRunner runner = new QueryRunner();
+      final int collect = 5;
       int fromChunk = 0;
       int toChunk = collect;
       do {
-        ProjectFileChunkResultHandler chunkHandler =
+        final ProjectFileChunkResultHandler chunkHandler =
             new ProjectFileChunkResultHandler();
         List<byte[]> data = null;
         try {
@@ -604,17 +613,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
               runner.query(connection,
                   ProjectFileChunkResultHandler.SELECT_PROJECT_CHUNKS_FILE,
                   chunkHandler, projectId, version, fromChunk, toChunk);
-        } catch (SQLException e) {
+        } catch (final SQLException e) {
           logger.error(e);
           throw new ProjectManagerException("Query for uploaded file for "
               + projectId + " failed.", e);
         }
 
         try {
-          for (byte[] d : data) {
+          for (final byte[] d : data) {
             bStream.write(d);
           }
-        } catch (IOException e) {
+        } catch (final IOException e) {
           throw new ProjectManagerException("Error writing file", e);
         }
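
Retrieval reverses the upload: chunks are read back in pages of five (`chunk >= fromChunk AND chunk < toChunk`) and appended in order to a temp file, whose md5 is then compared to the value stored in project_versions. A hedged plain-JDBC sketch of the same paging loop; the page size of 5 mirrors the `collect` constant above, and numChunks is the value from project_versions.

// Sketch only: page through project_files in chunk order and append each blob to a local
// file, following the SELECT_PROJECT_CHUNKS_FILE query above.
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class ChunkedDownloadSketch {

  static void reassemble(final Connection conn, final int projectId, final int version,
      final int numChunks, final File target) throws IOException, SQLException {
    final String select = "SELECT file FROM project_files"
        + " WHERE project_id=? AND version=? AND chunk >= ? AND chunk < ? ORDER BY chunk ASC";
    final int pageSize = 5;
    try (BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(target));
        PreparedStatement stmt = conn.prepareStatement(select)) {
      for (int from = 0; from < numChunks; from += pageSize) {
        stmt.setInt(1, projectId);
        stmt.setInt(2, version);
        stmt.setInt(3, from);
        stmt.setInt(4, from + pageSize);
        try (ResultSet rs = stmt.executeQuery()) {
          while (rs.next()) {
            out.write(rs.getBytes(1)); // chunks arrive in ascending order
          }
        }
      }
    }
  }
}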
 
@@ -630,7 +639,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     byte[] md5 = null;
     try {
       md5 = Md5Hasher.md5Hash(file);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Error getting md5 hash.", e);
     }
 
@@ -645,10 +654,10 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void changeProjectVersion(Project project, int version, String user)
+  public void changeProjectVersion(final Project project, final int version, final String user)
       throws ProjectManagerException {
-    long timestamp = System.currentTimeMillis();
-    QueryRunner runner = createQueryRunner();
+    final long timestamp = System.currentTimeMillis();
+    final QueryRunner runner = createQueryRunner();
     try {
       final String UPDATE_PROJECT_VERSION =
           "UPDATE projects SET version=?,modified_time=?,last_modified_by=? WHERE id=?";
@@ -658,7 +667,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       project.setVersion(version);
       project.setLastModifiedTimestamp(timestamp);
       project.setLastModifiedUser(user);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException(
           "Error updating switching project version " + project.getName(), e);
@@ -666,12 +675,12 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void updatePermission(Project project, String name, Permission perm,
-      boolean isGroup) throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+  public void updatePermission(final Project project, final String name, final Permission perm,
+      final boolean isGroup) throws ProjectManagerException {
+    final QueryRunner runner = createQueryRunner();
 
     if (this.allowsOnDuplicateKey()) {
-      long updateTime = System.currentTimeMillis();
+      final long updateTime = System.currentTimeMillis();
       final String INSERT_PROJECT_PERMISSION =
           "INSERT INTO project_permissions (project_id, modified_time, name, permissions, isGroup) values (?,?,?,?,?)"
               + "ON DUPLICATE KEY UPDATE modified_time = VALUES(modified_time), permissions = VALUES(permissions)";
@@ -679,20 +688,20 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       try {
         runner.update(INSERT_PROJECT_PERMISSION, project.getId(), updateTime,
             name, perm.toFlags(), isGroup);
-      } catch (SQLException e) {
+      } catch (final SQLException e) {
         logger.error(e);
         throw new ProjectManagerException("Error updating project "
             + project.getName() + " permissions for " + name, e);
       }
     } else {
-      long updateTime = System.currentTimeMillis();
+      final long updateTime = System.currentTimeMillis();
       final String MERGE_PROJECT_PERMISSION =
           "MERGE INTO project_permissions (project_id, modified_time, name, permissions, isGroup) KEY (project_id, name) values (?,?,?,?,?)";
 
       try {
         runner.update(MERGE_PROJECT_PERMISSION, project.getId(), updateTime,
             name, perm.toFlags(), isGroup);
-      } catch (SQLException e) {
+      } catch (final SQLException e) {
         logger.error(e);
         throw new ProjectManagerException("Error updating project "
             + project.getName() + " permissions for " + name, e);
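
The two statements in this method are the same upsert expressed for two dialects: MySQL's INSERT ... ON DUPLICATE KEY UPDATE when allowsOnDuplicateKey() is true, and an H2-style MERGE INTO ... KEY otherwise. A condensed sketch of that branch, where the boolean flag stands in for allowsOnDuplicateKey() and the QueryRunner is assumed to wrap a DataSource as createQueryRunner() does:

// Sketch only: one upsert, two dialects. 'supportsOnDuplicateKey' stands in for
// AbstractJdbcLoader.allowsOnDuplicateKey(); the runner must wrap a DataSource.
import java.sql.SQLException;
import org.apache.commons.dbutils.QueryRunner;

public class PermissionUpsertSketch {

  private static final String MYSQL_UPSERT =
      "INSERT INTO project_permissions (project_id, modified_time, name, permissions, isGroup)"
          + " values (?,?,?,?,?) ON DUPLICATE KEY UPDATE"
          + " modified_time = VALUES(modified_time), permissions = VALUES(permissions)";

  private static final String H2_MERGE =
      "MERGE INTO project_permissions (project_id, modified_time, name, permissions, isGroup)"
          + " KEY (project_id, name) values (?,?,?,?,?)";

  static void upsertPermission(final QueryRunner runner, final boolean supportsOnDuplicateKey,
      final int projectId, final String name, final int permissionFlags, final boolean isGroup)
      throws SQLException {
    final String sql = supportsOnDuplicateKey ? MYSQL_UPSERT : H2_MERGE;
    runner.update(sql, projectId, System.currentTimeMillis(), name, permissionFlags, isGroup);
  }
}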
@@ -707,29 +716,29 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void updateProjectSettings(Project project)
+  public void updateProjectSettings(final Project project)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     try {
-      updateProjectSettings(connection, project, defaultEncodingType);
+      updateProjectSettings(connection, project, this.defaultEncodingType);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error updating project settings", e);
     } finally {
       DbUtils.closeQuietly(connection);
     }
   }
 
-  private void updateProjectSettings(Connection connection, Project project,
-      EncodingType encType) throws ProjectManagerException {
-    QueryRunner runner = new QueryRunner();
+  private void updateProjectSettings(final Connection connection, final Project project,
+      final EncodingType encType) throws ProjectManagerException {
+    final QueryRunner runner = new QueryRunner();
     final String UPDATE_PROJECT_SETTINGS =
         "UPDATE projects SET enc_type=?, settings_blob=? WHERE id=?";
 
-    String json = JSONUtils.toJSON(project.toObject());
+    final String json = JSONUtils.toJSON(project.toObject());
     byte[] data = null;
     try {
-      byte[] stringData = json.getBytes("UTF-8");
+      final byte[] stringData = json.getBytes("UTF-8");
       data = stringData;
 
       if (encType == EncodingType.GZIP) {
@@ -737,7 +746,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       }
       logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
           + " Gzip:" + data.length);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Failed to encode. ", e);
     }
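
Settings, flows, and properties all persist the same way: serialize to JSON, encode as UTF-8, optionally gzip the bytes, and store the result in a blob column next to an enc_type discriminator. A small round-trip sketch using the same helpers invoked above; their azkaban.utils package location is assumed from the rest of this module.

// Sketch only: the JSON + optional-GZIP round trip behind settings_blob, project_flows.json
// and project_properties.property. GZIPUtils/JSONUtils are the helpers called in the loader
// above; their package is an assumption based on the other imports in this module.
import azkaban.utils.GZIPUtils;
import azkaban.utils.JSONUtils;
import java.io.IOException;

public class BlobCodecSketch {

  static byte[] encode(final Object obj, final boolean gzip) throws IOException {
    final String json = JSONUtils.toJSON(obj);
    final byte[] utf8 = json.getBytes("UTF-8");
    return gzip ? GZIPUtils.gzipBytes(utf8) : utf8;
  }

  static Object decode(final byte[] blob, final boolean gzip) throws IOException {
    final String json =
        gzip ? GZIPUtils.unGzipString(blob, "UTF-8") : new String(blob, "UTF-8");
    return JSONUtils.parseJSONFromString(json);
  }
}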
 
@@ -745,22 +754,22 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       runner.update(connection, UPDATE_PROJECT_SETTINGS, encType.getNumVal(),
           data, project.getId());
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error updating project "
           + project.getName() + " version " + project.getVersion(), e);
     }
   }
 
   @Override
-  public void removePermission(Project project, String name, boolean isGroup)
+  public void removePermission(final Project project, final String name, final boolean isGroup)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
     final String DELETE_PROJECT_PERMISSION =
         "DELETE FROM project_permissions WHERE project_id=? AND name=? AND isGroup=?";
 
     try {
       runner.update(DELETE_PROJECT_PERMISSION, project.getId(), name, isGroup);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException("Error deleting project "
           + project.getName() + " permissions for " + name, e);
@@ -775,17 +784,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   @Override
   public List<Triple<String, Boolean, Permission>> getProjectPermissions(
-      int projectId) throws ProjectManagerException {
-    ProjectPermissionsResultHandler permHander =
+      final int projectId) throws ProjectManagerException {
+    final ProjectPermissionsResultHandler permHander =
         new ProjectPermissionsResultHandler();
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
     List<Triple<String, Boolean, Permission>> permissions = null;
     try {
       permissions =
           runner.query(
               ProjectPermissionsResultHandler.SELECT_PROJECT_PERMISSION,
               permHander, projectId);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Query for permissions for "
           + projectId + " failed.", e);
     }
@@ -794,16 +803,16 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void removeProject(Project project, String user)
+  public void removeProject(final Project project, final String user)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
 
-    long updateTime = System.currentTimeMillis();
+    final long updateTime = System.currentTimeMillis();
     final String UPDATE_INACTIVE_PROJECT =
         "UPDATE projects SET active=false,modified_time=?,last_modified_by=? WHERE id=?";
     try {
       runner.update(UPDATE_INACTIVE_PROJECT, updateTime, user, project.getId());
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException("Error marking project "
           + project.getName() + " as inactive", e);
@@ -811,17 +820,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public boolean postEvent(Project project, EventType type, String user,
-      String message) {
-    QueryRunner runner = createQueryRunner();
+  public boolean postEvent(final Project project, final EventType type, final String user,
+      final String message) {
+    final QueryRunner runner = createQueryRunner();
 
     final String INSERT_PROJECT_EVENTS =
         "INSERT INTO project_events (project_id, event_type, event_time, username, message) values (?,?,?,?,?)";
-    long updateTime = System.currentTimeMillis();
+    final long updateTime = System.currentTimeMillis();
     try {
       runner.update(INSERT_PROJECT_EVENTS, project.getId(), type.getNumVal(),
           updateTime, user, message);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       return false;
     }
@@ -831,23 +840,19 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   /**
    * Get all the logs for a given project
-   *
-   * @param project
-   * @return
-   * @throws ProjectManagerException
    */
   @Override
-  public List<ProjectLogEvent> getProjectEvents(Project project, int num,
-      int skip) throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+  public List<ProjectLogEvent> getProjectEvents(final Project project, final int num,
+      final int skip) throws ProjectManagerException {
+    final QueryRunner runner = createQueryRunner();
 
-    ProjectLogsResultHandler logHandler = new ProjectLogsResultHandler();
+    final ProjectLogsResultHandler logHandler = new ProjectLogsResultHandler();
     List<ProjectLogEvent> events = null;
     try {
       events =
           runner.query(ProjectLogsResultHandler.SELECT_PROJECT_EVENTS_ORDER,
               logHandler, project.getId(), num, skip);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
     }
 
@@ -855,20 +860,20 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void updateDescription(Project project, String description, String user)
+  public void updateDescription(final Project project, final String description, final String user)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
 
     final String UPDATE_PROJECT_DESCRIPTION =
         "UPDATE projects SET description=?,modified_time=?,last_modified_by=? WHERE id=?";
-    long updateTime = System.currentTimeMillis();
+    final long updateTime = System.currentTimeMillis();
     try {
       runner.update(UPDATE_PROJECT_DESCRIPTION, description, updateTime, user,
           project.getId());
       project.setDescription(description);
       project.setLastModifiedTimestamp(updateTime);
       project.setLastModifiedUser(user);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException("Error marking project "
           + project.getName() + " as inactive", e);
@@ -876,15 +881,15 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public int getLatestProjectVersion(Project project)
+  public int getLatestProjectVersion(final Project project)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
 
-    IntHander handler = new IntHander();
+    final IntHander handler = new IntHander();
     try {
       return runner.query(IntHander.SELECT_LATEST_VERSION, handler,
           project.getId());
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(e);
       throw new ProjectManagerException("Error marking project "
           + project.getName() + " as inactive", e);
@@ -892,21 +897,21 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void uploadFlows(Project project, int version, Collection<Flow> flows)
+  public void uploadFlows(final Project project, final int version, final Collection<Flow> flows)
       throws ProjectManagerException {
     // We do one at a time instead of batch... because well, the batch could be
     // large.
     logger.info("Uploading flows");
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
-      for (Flow flow : flows) {
-        uploadFlow(connection, project, version, flow, defaultEncodingType);
+      for (final Flow flow : flows) {
+        uploadFlow(connection, project, version, flow, this.defaultEncodingType);
       }
       connection.commit();
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Flow Upload failed.", e);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Flow Upload failed.", e);
     } finally {
       DbUtils.closeQuietly(connection);
@@ -914,17 +919,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void uploadFlow(Project project, int version, Flow flow)
+  public void uploadFlow(final Project project, final int version, final Flow flow)
       throws ProjectManagerException {
     logger.info("Uploading flows");
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
-      uploadFlow(connection, project, version, flow, defaultEncodingType);
+      uploadFlow(connection, project, version, flow, this.defaultEncodingType);
       connection.commit();
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Flow Upload failed.", e);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Flow Upload failed commit.", e);
     } finally {
       DbUtils.closeQuietly(connection);
@@ -932,18 +937,18 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void updateFlow(Project project, int version, Flow flow)
+  public void updateFlow(final Project project, final int version, final Flow flow)
       throws ProjectManagerException {
     logger.info("Uploading flows");
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
-      QueryRunner runner = new QueryRunner();
-      String json = JSONUtils.toJSON(flow.toObject());
-      byte[] stringData = json.getBytes("UTF-8");
+      final QueryRunner runner = new QueryRunner();
+      final String json = JSONUtils.toJSON(flow.toObject());
+      final byte[] stringData = json.getBytes("UTF-8");
       byte[] data = stringData;
 
-      if (defaultEncodingType == EncodingType.GZIP) {
+      if (this.defaultEncodingType == EncodingType.GZIP) {
         data = GZIPUtils.gzipBytes(stringData);
       }
 
@@ -952,17 +957,17 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       final String UPDATE_FLOW =
           "UPDATE project_flows SET encoding_type=?,json=? WHERE project_id=? AND version=? AND flow_id=?";
       try {
-        runner.update(connection, UPDATE_FLOW, defaultEncodingType.getNumVal(),
+        runner.update(connection, UPDATE_FLOW, this.defaultEncodingType.getNumVal(),
             data, project.getId(), version, flow.getId());
-      } catch (SQLException e) {
+      } catch (final SQLException e) {
         e.printStackTrace();
         throw new ProjectManagerException("Error inserting flow "
             + flow.getId(), e);
       }
       connection.commit();
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Flow Upload failed.", e);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Flow Upload failed commit.", e);
     } finally {
       DbUtils.closeQuietly(connection);
@@ -970,19 +975,19 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   public EncodingType getDefaultEncodingType() {
-    return defaultEncodingType;
+    return this.defaultEncodingType;
   }
 
-  public void setDefaultEncodingType(EncodingType defaultEncodingType) {
+  public void setDefaultEncodingType(final EncodingType defaultEncodingType) {
     this.defaultEncodingType = defaultEncodingType;
   }
 
-  private void uploadFlow(Connection connection, Project project, int version,
-      Flow flow, EncodingType encType) throws ProjectManagerException,
+  private void uploadFlow(final Connection connection, final Project project, final int version,
+      final Flow flow, final EncodingType encType) throws ProjectManagerException,
       IOException {
-    QueryRunner runner = new QueryRunner();
-    String json = JSONUtils.toJSON(flow.toObject());
-    byte[] stringData = json.getBytes("UTF-8");
+    final QueryRunner runner = new QueryRunner();
+    final String json = JSONUtils.toJSON(flow.toObject());
+    final byte[] stringData = json.getBytes("UTF-8");
     byte[] data = stringData;
 
     if (encType == EncodingType.GZIP) {
@@ -995,20 +1000,20 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     try {
       runner.update(connection, INSERT_FLOW, project.getId(), version,
           flow.getId(), System.currentTimeMillis(), encType.getNumVal(), data);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error inserting flow " + flow.getId(),
           e);
     }
   }
 
   @Override
-  public Flow fetchFlow(Project project, String flowId)
+  public Flow fetchFlow(final Project project, final String flowId)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
-    ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
+    final QueryRunner runner = createQueryRunner();
+    final ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
 
     try {
-      List<Flow> flows =
+      final List<Flow> flows =
           runner.query(ProjectFlowsResultHandler.SELECT_PROJECT_FLOW, handler,
               project.getId(), project.getVersion(), flowId);
       if (flows.isEmpty()) {
@@ -1016,23 +1021,23 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       } else {
         return flows.get(0);
       }
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error fetching flow " + flowId, e);
     }
   }
 
   @Override
-  public List<Flow> fetchAllProjectFlows(Project project)
+  public List<Flow> fetchAllProjectFlows(final Project project)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
-    ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
+    final QueryRunner runner = createQueryRunner();
+    final ProjectFlowsResultHandler handler = new ProjectFlowsResultHandler();
 
     List<Flow> flows = null;
     try {
       flows =
           runner.query(ProjectFlowsResultHandler.SELECT_ALL_PROJECT_FLOWS,
               handler, project.getId(), project.getVersion());
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error fetching flows from project "
           + project.getName() + " version " + project.getVersion(), e);
     }
@@ -1041,19 +1046,19 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void uploadProjectProperties(Project project, List<Props> properties)
+  public void uploadProjectProperties(final Project project, final List<Props> properties)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
-      for (Props props : properties) {
+      for (final Props props : properties) {
         uploadProjectProperty(connection, project, props.getSource(), props);
       }
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException(
           "Error uploading project property files", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException(
           "Error uploading project property files", e);
     } finally {
@@ -1062,16 +1067,16 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void uploadProjectProperty(Project project, Props props)
+  public void uploadProjectProperty(final Project project, final Props props)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     try {
       uploadProjectProperty(connection, project, props.getSource(), props);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException(
           "Error uploading project property files", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException(
           "Error uploading project property file", e);
     } finally {
@@ -1080,16 +1085,16 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void updateProjectProperty(Project project, Props props)
+  public void updateProjectProperty(final Project project, final Props props)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     try {
       updateProjectProperty(connection, project, props.getSource(), props);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException(
           "Error uploading project property files", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException(
           "Error uploading project property file", e);
     } finally {
@@ -1097,15 +1102,15 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     }
   }
 
-  private void updateProjectProperty(Connection connection, Project project,
-      String name, Props props) throws ProjectManagerException, IOException {
-    QueryRunner runner = new QueryRunner();
+  private void updateProjectProperty(final Connection connection, final Project project,
+      final String name, final Props props) throws ProjectManagerException, IOException {
+    final QueryRunner runner = new QueryRunner();
     final String UPDATE_PROPERTIES =
         "UPDATE project_properties SET property=? WHERE project_id=? AND version=? AND name=?";
 
-    String propertyJSON = PropsUtils.toJSONString(props, true);
+    final String propertyJSON = PropsUtils.toJSONString(props, true);
     byte[] data = propertyJSON.getBytes("UTF-8");
-    if (defaultEncodingType == EncodingType.GZIP) {
+    if (this.defaultEncodingType == EncodingType.GZIP) {
       data = GZIPUtils.gzipBytes(data);
     }
 
@@ -1113,30 +1118,30 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       runner.update(connection, UPDATE_PROPERTIES, data, project.getId(),
           project.getVersion(), name);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error updating property "
           + project.getName() + " version " + project.getVersion(), e);
     }
   }
 
-  private void uploadProjectProperty(Connection connection, Project project,
-      String name, Props props) throws ProjectManagerException, IOException {
-    QueryRunner runner = new QueryRunner();
+  private void uploadProjectProperty(final Connection connection, final Project project,
+      final String name, final Props props) throws ProjectManagerException, IOException {
+    final QueryRunner runner = new QueryRunner();
     final String INSERT_PROPERTIES =
         "INSERT INTO project_properties (project_id, version, name, modified_time, encoding_type, property) values (?,?,?,?,?,?)";
 
-    String propertyJSON = PropsUtils.toJSONString(props, true);
+    final String propertyJSON = PropsUtils.toJSONString(props, true);
     byte[] data = propertyJSON.getBytes("UTF-8");
-    if (defaultEncodingType == EncodingType.GZIP) {
+    if (this.defaultEncodingType == EncodingType.GZIP) {
       data = GZIPUtils.gzipBytes(data);
     }
 
     try {
       runner.update(connection, INSERT_PROPERTIES, project.getId(),
           project.getVersion(), name, System.currentTimeMillis(),
-          defaultEncodingType.getNumVal(), data);
+          this.defaultEncodingType.getNumVal(), data);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error uploading project properties "
           + name + " into " + project.getName() + " version "
           + project.getVersion(), e);
@@ -1144,14 +1149,14 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public Props fetchProjectProperty(int projectId, int projectVer,
-      String propsName) throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+  public Props fetchProjectProperty(final int projectId, final int projectVer,
+      final String propsName) throws ProjectManagerException {
+    final QueryRunner runner = createQueryRunner();
 
-    ProjectPropertiesResultsHandler handler =
+    final ProjectPropertiesResultsHandler handler =
         new ProjectPropertiesResultsHandler();
     try {
-      List<Pair<String, Props>> properties =
+      final List<Pair<String, Props>> properties =
           runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY,
               handler, projectId, projectVer, propsName);
 
@@ -1160,7 +1165,7 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       }
 
       return properties.get(0).getSecond();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error("Error fetching property " + propsName
           + " Project " + projectId + " version " + projectVer, e);
       throw new ProjectManagerException("Error fetching property " + propsName,
@@ -1169,26 +1174,26 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public Props fetchProjectProperty(Project project, String propsName)
+  public Props fetchProjectProperty(final Project project, final String propsName)
       throws ProjectManagerException {
     // TODO: 11/23/16 call the other overloaded method fetchProjectProperty internally.
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
 
-    ProjectPropertiesResultsHandler handler =
+    final ProjectPropertiesResultsHandler handler =
         new ProjectPropertiesResultsHandler();
     try {
-      List<Pair<String, Props>> properties =
+      final List<Pair<String, Props>> properties =
           runner.query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY,
               handler, project.getId(), project.getVersion(), propsName);
 
       if (properties == null || properties.isEmpty()) {
         logger.warn("Project " + project.getId() + " version " + project.getVersion()
-          + " property " + propsName + " is empty.");
+            + " property " + propsName + " is empty.");
         return null;
       }
 
       return properties.get(0).getSecond();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error("Error fetching property " + propsName
           + "Project " + project.getId() + " version " + project.getVersion(), e);
       throw new ProjectManagerException("Error fetching property " + propsName
@@ -1197,9 +1202,9 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void cleanOlderProjectVersion(int projectId, int version)
+  public void cleanOlderProjectVersion(final int projectId, final int version)
       throws ProjectManagerException {
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
     try {
       cleanOlderProjectVersionFlows(connection, projectId, version);
@@ -1211,58 +1216,58 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
     }
   }
 
-  private void cleanOlderProjectVersionFlows(Connection connection,
-      int projectId, int version) throws ProjectManagerException {
+  private void cleanOlderProjectVersionFlows(final Connection connection,
+      final int projectId, final int version) throws ProjectManagerException {
     final String DELETE_FLOW =
         "DELETE FROM project_flows WHERE project_id=? AND version<?";
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, DELETE_FLOW, projectId, version);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error deleting project version flows "
           + projectId + ":" + version, e);
     }
   }
 
-  private void cleanOlderProjectVersionProperties(Connection connection,
-      int projectId, int version) throws ProjectManagerException {
+  private void cleanOlderProjectVersionProperties(final Connection connection,
+      final int projectId, final int version) throws ProjectManagerException {
     final String DELETE_PROPERTIES =
         "DELETE FROM project_properties WHERE project_id=? AND version<?";
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, DELETE_PROPERTIES, projectId, version);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException(
           "Error deleting project version properties " + projectId + ":"
               + version, e);
     }
   }
 
-  private void cleanOlderProjectFiles(Connection connection, int projectId,
-      int version) throws ProjectManagerException {
+  private void cleanOlderProjectFiles(final Connection connection, final int projectId,
+      final int version) throws ProjectManagerException {
     final String DELETE_PROJECT_FILES =
         "DELETE FROM project_files WHERE project_id=? AND version<?";
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, DELETE_PROJECT_FILES, projectId, version);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException("Error deleting project version files "
           + projectId + ":" + version, e);
     }
   }
 
-  private void cleanOlderProjectVersion(Connection connection, int projectId,
-      int version) throws ProjectManagerException {
+  private void cleanOlderProjectVersion(final Connection connection, final int projectId,
+      final int version) throws ProjectManagerException {
     final String UPDATE_PROJECT_VERSIONS =
         "UPDATE project_versions SET num_chunks=0 WHERE project_id=? AND version<?";
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, UPDATE_PROJECT_VERSIONS, projectId, version);
       connection.commit();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new ProjectManagerException(
           "Error updating project version chunksize " + projectId + ":"
               + version, e);
@@ -1270,14 +1275,14 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public Map<String, Props> fetchProjectProperties(int projectId, int version)
+  public Map<String, Props> fetchProjectProperties(final int projectId, final int version)
       throws ProjectManagerException {
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
 
-    ProjectPropertiesResultsHandler handler =
+    final ProjectPropertiesResultsHandler handler =
         new ProjectPropertiesResultsHandler();
     try {
-      List<Pair<String, Props>> properties =
+      final List<Pair<String, Props>> properties =
           runner.query(
               ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTIES,
               handler, projectId, version);
@@ -1286,67 +1291,80 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
         return null;
       }
 
-      HashMap<String, Props> props = new HashMap<String, Props>();
-      for (Pair<String, Props> pair : properties) {
+      final HashMap<String, Props> props = new HashMap<>();
+      for (final Pair<String, Props> pair : properties) {
         props.put(pair.getFirst(), pair.getSecond());
       }
       return props;
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error("Error fetching properties, project id" + projectId + " version " + version, e);
       throw new ProjectManagerException("Error fetching properties", e);
     }
   }
 
+  private Connection getConnection() throws ProjectManagerException {
+    Connection connection = null;
+    try {
+      connection = super.getDBConnection(false);
+    } catch (final Exception e) {
+      DbUtils.closeQuietly(connection);
+      throw new ProjectManagerException("Error getting DB connection.", e);
+    }
+
+    return connection;
+  }
+
   private static class ProjectResultHandler implements
       ResultSetHandler<List<Project>> {
-    private static String SELECT_PROJECT_BY_NAME =
+
+    private static final String SELECT_PROJECT_BY_NAME =
         "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE name=?";
 
-    private static String SELECT_PROJECT_BY_ID =
+    private static final String SELECT_PROJECT_BY_ID =
         "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE id=?";
 
-    private static String SELECT_ALL_ACTIVE_PROJECTS =
+    private static final String SELECT_ALL_ACTIVE_PROJECTS =
         "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE active=true";
 
-    private static String SELECT_ACTIVE_PROJECT_BY_NAME =
+    private static final String SELECT_ACTIVE_PROJECT_BY_NAME =
         "SELECT id, name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob FROM projects WHERE name=? AND active=true";
 
     @Override
-    public List<Project> handle(ResultSet rs) throws SQLException {
+    public List<Project> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<Project> emptyList();
+        return Collections.<Project>emptyList();
       }
 
-      ArrayList<Project> projects = new ArrayList<Project>();
+      final ArrayList<Project> projects = new ArrayList<>();
       do {
-        int id = rs.getInt(1);
-        String name = rs.getString(2);
-        boolean active = rs.getBoolean(3);
-        long modifiedTime = rs.getLong(4);
-        long createTime = rs.getLong(5);
-        int version = rs.getInt(6);
-        String lastModifiedBy = rs.getString(7);
-        String description = rs.getString(8);
-        int encodingType = rs.getInt(9);
-        byte[] data = rs.getBytes(10);
-
-        Project project;
+        final int id = rs.getInt(1);
+        final String name = rs.getString(2);
+        final boolean active = rs.getBoolean(3);
+        final long modifiedTime = rs.getLong(4);
+        final long createTime = rs.getLong(5);
+        final int version = rs.getInt(6);
+        final String lastModifiedBy = rs.getString(7);
+        final String description = rs.getString(8);
+        final int encodingType = rs.getInt(9);
+        final byte[] data = rs.getBytes(10);
+
+        final Project project;
         if (data != null) {
-          EncodingType encType = EncodingType.fromInteger(encodingType);
-          Object blobObj;
+          final EncodingType encType = EncodingType.fromInteger(encodingType);
+          final Object blobObj;
           try {
             // Convoluted way to inflate strings. Should find common package or
             // helper function.
             if (encType == EncodingType.GZIP) {
               // Decompress the sucker.
-              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+              final String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
               blobObj = JSONUtils.parseJSONFromString(jsonString);
             } else {
-              String jsonString = new String(data, "UTF-8");
+              final String jsonString = new String(data, "UTF-8");
               blobObj = JSONUtils.parseJSONFromString(jsonString);
             }
             project = Project.projectFromObject(blobObj);
-          } catch (IOException e) {
+          } catch (final IOException e) {
             throw new SQLException("Failed to get project.", e);
           }
         } else {
@@ -1371,25 +1389,26 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   private static class ProjectPermissionsResultHandler implements
       ResultSetHandler<List<Triple<String, Boolean, Permission>>> {
-    private static String SELECT_PROJECT_PERMISSION =
+
+    private static final String SELECT_PROJECT_PERMISSION =
         "SELECT project_id, modified_time, name, permissions, isGroup FROM project_permissions WHERE project_id=?";
 
     @Override
-    public List<Triple<String, Boolean, Permission>> handle(ResultSet rs)
+    public List<Triple<String, Boolean, Permission>> handle(final ResultSet rs)
         throws SQLException {
       if (!rs.next()) {
-        return Collections.<Triple<String, Boolean, Permission>> emptyList();
+        return Collections.<Triple<String, Boolean, Permission>>emptyList();
       }
 
-      ArrayList<Triple<String, Boolean, Permission>> permissions =
-          new ArrayList<Triple<String, Boolean, Permission>>();
+      final ArrayList<Triple<String, Boolean, Permission>> permissions =
+          new ArrayList<>();
       do {
-        String username = rs.getString(3);
-        int permissionFlag = rs.getInt(4);
-        boolean val = rs.getBoolean(5);
+        final String username = rs.getString(3);
+        final int permissionFlag = rs.getInt(4);
+        final boolean val = rs.getBoolean(5);
 
-        Permission perm = new Permission(permissionFlag);
-        permissions.add(new Triple<String, Boolean, Permission>(username, val,
+        final Permission perm = new Permission(permissionFlag);
+        permissions.add(new Triple<>(username, val,
             perm));
       } while (rs.next());
 
@@ -1399,29 +1418,30 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   private static class ProjectFlowsResultHandler implements
       ResultSetHandler<List<Flow>> {
-    private static String SELECT_PROJECT_FLOW =
+
+    private static final String SELECT_PROJECT_FLOW =
         "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=? AND flow_id=?";
 
-    private static String SELECT_ALL_PROJECT_FLOWS =
+    private static final String SELECT_ALL_PROJECT_FLOWS =
         "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=?";
 
     @Override
-    public List<Flow> handle(ResultSet rs) throws SQLException {
+    public List<Flow> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<Flow> emptyList();
+        return Collections.<Flow>emptyList();
       }
 
-      ArrayList<Flow> flows = new ArrayList<Flow>();
+      final ArrayList<Flow> flows = new ArrayList<>();
       do {
-        String flowId = rs.getString(3);
-        int encodingType = rs.getInt(5);
-        byte[] dataBytes = rs.getBytes(6);
+        final String flowId = rs.getString(3);
+        final int encodingType = rs.getInt(5);
+        final byte[] dataBytes = rs.getBytes(6);
 
         if (dataBytes == null) {
           continue;
         }
 
-        EncodingType encType = EncodingType.fromInteger(encodingType);
+        final EncodingType encType = EncodingType.fromInteger(encodingType);
 
         Object flowObj = null;
         try {
@@ -1429,16 +1449,16 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
           // helper function.
           if (encType == EncodingType.GZIP) {
             // Decompress the sucker.
-            String jsonString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
+            final String jsonString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
             flowObj = JSONUtils.parseJSONFromString(jsonString);
           } else {
-            String jsonString = new String(dataBytes, "UTF-8");
+            final String jsonString = new String(dataBytes, "UTF-8");
             flowObj = JSONUtils.parseJSONFromString(jsonString);
           }
 
-          Flow flow = Flow.flowFromObject(flowObj);
+          final Flow flow = Flow.flowFromObject(flowObj);
           flows.add(flow);
-        } catch (IOException e) {
+        } catch (final IOException e) {
           throw new SQLException("Error retrieving flow data " + flowId, e);
         }
 
@@ -1450,26 +1470,27 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   private static class ProjectPropertiesResultsHandler implements
       ResultSetHandler<List<Pair<String, Props>>> {
-    private static String SELECT_PROJECT_PROPERTY =
+
+    private static final String SELECT_PROJECT_PROPERTY =
         "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=? AND name=?";
 
-    private static String SELECT_PROJECT_PROPERTIES =
+    private static final String SELECT_PROJECT_PROPERTIES =
         "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=?";
 
     @Override
-    public List<Pair<String, Props>> handle(ResultSet rs) throws SQLException {
+    public List<Pair<String, Props>> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<Pair<String, Props>> emptyList();
+        return Collections.<Pair<String, Props>>emptyList();
       }
 
-      List<Pair<String, Props>> properties =
-          new ArrayList<Pair<String, Props>>();
+      final List<Pair<String, Props>> properties =
+          new ArrayList<>();
       do {
-        String name = rs.getString(3);
-        int eventType = rs.getInt(5);
-        byte[] dataBytes = rs.getBytes(6);
+        final String name = rs.getString(3);
+        final int eventType = rs.getInt(5);
+        final byte[] dataBytes = rs.getBytes(6);
 
-        EncodingType encType = EncodingType.fromInteger(eventType);
+        final EncodingType encType = EncodingType.fromInteger(eventType);
         String propertyString = null;
 
         try {
@@ -1480,10 +1501,10 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
             propertyString = new String(dataBytes, "UTF-8");
           }
 
-          Props props = PropsUtils.fromJSONString(propertyString);
+          final Props props = PropsUtils.fromJSONString(propertyString);
           props.setSource(name);
-          properties.add(new Pair<String, Props>(name, props));
-        } catch (IOException e) {
+          properties.add(new Pair<>(name, props));
+        } catch (final IOException e) {
           throw new SQLException(e);
         }
       } while (rs.next());
@@ -1494,24 +1515,25 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   private static class ProjectLogsResultHandler implements
       ResultSetHandler<List<ProjectLogEvent>> {
-    private static String SELECT_PROJECT_EVENTS_ORDER =
+
+    private static final String SELECT_PROJECT_EVENTS_ORDER =
         "SELECT project_id, event_type, event_time, username, message FROM project_events WHERE project_id=? ORDER BY event_time DESC LIMIT ? OFFSET ?";
 
     @Override
-    public List<ProjectLogEvent> handle(ResultSet rs) throws SQLException {
+    public List<ProjectLogEvent> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<ProjectLogEvent> emptyList();
+        return Collections.<ProjectLogEvent>emptyList();
       }
 
-      ArrayList<ProjectLogEvent> events = new ArrayList<ProjectLogEvent>();
+      final ArrayList<ProjectLogEvent> events = new ArrayList<>();
       do {
-        int projectId = rs.getInt(1);
-        int eventType = rs.getInt(2);
-        long eventTime = rs.getLong(3);
-        String username = rs.getString(4);
-        String message = rs.getString(5);
+        final int projectId = rs.getInt(1);
+        final int eventType = rs.getInt(2);
+        final long eventTime = rs.getLong(3);
+        final String username = rs.getString(4);
+        final String message = rs.getString(5);
 
-        ProjectLogEvent event =
+        final ProjectLogEvent event =
             new ProjectLogEvent(projectId, EventType.fromInteger(eventType),
                 eventTime, username, message);
         events.add(event);
@@ -1523,18 +1545,19 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   private static class ProjectFileChunkResultHandler implements
       ResultSetHandler<List<byte[]>> {
-    private static String SELECT_PROJECT_CHUNKS_FILE =
+
+    private static final String SELECT_PROJECT_CHUNKS_FILE =
         "SELECT project_id, version, chunk, size, file FROM project_files WHERE project_id=? AND version=? AND chunk >= ? AND chunk < ? ORDER BY chunk ASC";
 
     @Override
-    public List<byte[]> handle(ResultSet rs) throws SQLException {
+    public List<byte[]> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<byte[]> emptyList();
+        return Collections.<byte[]>emptyList();
       }
 
-      ArrayList<byte[]> data = new ArrayList<byte[]>();
+      final ArrayList<byte[]> data = new ArrayList<>();
       do {
-        byte[] bytes = rs.getBytes(5);
+        final byte[] bytes = rs.getBytes(5);
 
         data.add(bytes);
       } while (rs.next());
@@ -1544,31 +1567,34 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
 
   }
 
-  private static class ProjectVersionResultHandler implements ResultSetHandler<List<ProjectFileHandler>> {
-    private static String SELECT_PROJECT_VERSION =
+  private static class ProjectVersionResultHandler implements
+      ResultSetHandler<List<ProjectFileHandler>> {
+
+    private static final String SELECT_PROJECT_VERSION =
         "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks, resource_id "
             + "FROM project_versions WHERE project_id=? AND version=?";
 
     @Override
-    public List<ProjectFileHandler> handle(ResultSet rs) throws SQLException {
+    public List<ProjectFileHandler> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
         return null;
       }
 
-      List<ProjectFileHandler> handlers = new ArrayList<ProjectFileHandler>();
+      final List<ProjectFileHandler> handlers = new ArrayList<>();
       do {
-        int projectId = rs.getInt(1);
-        int version = rs.getInt(2);
-        long uploadTime = rs.getLong(3);
-        String uploader = rs.getString(4);
-        String fileType = rs.getString(5);
-        String fileName = rs.getString(6);
-        byte[] md5 = rs.getBytes(7);
-        int numChunks = rs.getInt(8);
-        String resourceId = rs.getString(9);
-
-        ProjectFileHandler handler = new ProjectFileHandler(
-            projectId, version, uploadTime, uploader, fileType, fileName, numChunks, md5, resourceId);
+        final int projectId = rs.getInt(1);
+        final int version = rs.getInt(2);
+        final long uploadTime = rs.getLong(3);
+        final String uploader = rs.getString(4);
+        final String fileType = rs.getString(5);
+        final String fileName = rs.getString(6);
+        final byte[] md5 = rs.getBytes(7);
+        final int numChunks = rs.getInt(8);
+        final String resourceId = rs.getString(9);
+
+        final ProjectFileHandler handler = new ProjectFileHandler(
+            projectId, version, uploadTime, uploader, fileType, fileName, numChunks, md5,
+            resourceId);
 
         handlers.add(handler);
       } while (rs.next());
@@ -1578,11 +1604,12 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
   }
 
   private static class IntHander implements ResultSetHandler<Integer> {
-    private static String SELECT_LATEST_VERSION =
+
+    private static final String SELECT_LATEST_VERSION =
         "SELECT MAX(version) FROM project_versions WHERE project_id=?";
 
     @Override
-    public Integer handle(ResultSet rs) throws SQLException {
+    public Integer handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
         return 0;
       }
@@ -1590,16 +1617,4 @@ public class JdbcProjectLoader extends AbstractJdbcLoader implements
       return rs.getInt(1);
     }
   }
-
-  private Connection getConnection() throws ProjectManagerException {
-    Connection connection = null;
-    try {
-      connection = super.getDBConnection(false);
-    } catch (Exception e) {
-      DbUtils.closeQuietly(connection);
-      throw new ProjectManagerException("Error getting DB connection.", e);
-    }
-
-    return connection;
-  }
 }
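
Every static handler class in this file is the same commons-dbutils idiom: a ResultSetHandler<T> walks the ResultSet once and returns a typed value, while QueryRunner owns statement and resource handling. A minimal hedged example of the pattern, reusing IntHander's latest-version query; the DataSource is assumed to be whatever createQueryRunner() is backed by.

// Sketch only: the commons-dbutils pattern used by every ResultSetHandler above.
// The SQL is IntHander's SELECT_LATEST_VERSION; the DataSource is an assumption.
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;

public class LatestVersionQuerySketch {

  // Maps the single MAX(version) column to an int, defaulting to 0 when no row matches.
  private static final ResultSetHandler<Integer> LATEST_VERSION =
      new ResultSetHandler<Integer>() {
        @Override
        public Integer handle(final ResultSet rs) throws SQLException {
          return rs.next() ? rs.getInt(1) : 0;
        }
      };

  static int fetchLatestVersion(final DataSource dataSource, final int projectId)
      throws SQLException {
    final QueryRunner runner = new QueryRunner(dataSource);
    return runner.query("SELECT MAX(version) FROM project_versions WHERE project_id=?",
        LATEST_VERSION, projectId);
  }
}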
diff --git a/azkaban-common/src/main/java/azkaban/project/Project.java b/azkaban-common/src/main/java/azkaban/project/Project.java
index 0a39709..1ffdb75 100644
--- a/azkaban-common/src/main/java/azkaban/project/Project.java
+++ b/azkaban-common/src/main/java/azkaban/project/Project.java
@@ -16,6 +16,11 @@
 
 package azkaban.project;
 
+import azkaban.flow.Flow;
+import azkaban.user.Permission;
+import azkaban.user.Permission.Type;
+import azkaban.user.User;
+import azkaban.utils.Pair;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
@@ -25,15 +30,15 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import azkaban.flow.Flow;
-import azkaban.user.Permission;
-import azkaban.user.Permission.Type;
-import azkaban.user.User;
-import azkaban.utils.Pair;
-
 public class Project {
+
   private final int id;
   private final String name;
+  private final LinkedHashMap<String, Permission> userPermissionMap =
+      new LinkedHashMap<>();
+  private final LinkedHashMap<String, Permission> groupPermissionMap =
+      new LinkedHashMap<>();
+  private final HashSet<String> proxyUsers = new HashSet<>();
   private boolean active = true;
   private String description;
   private int version = -1;
@@ -41,58 +46,100 @@ public class Project {
   private long lastModifiedTimestamp;
   private String lastModifiedUser;
   private String source;
-  private LinkedHashMap<String, Permission> userPermissionMap =
-      new LinkedHashMap<String, Permission>();
-  private LinkedHashMap<String, Permission> groupPermissionMap =
-      new LinkedHashMap<String, Permission>();
   private Map<String, Flow> flows = null;
-  private HashSet<String> proxyUsers = new HashSet<String>();
-  private Map<String, Object> metadata = new HashMap<String, Object>();
+  private Map<String, Object> metadata = new HashMap<>();
 
-  public Project(int id, String name) {
+  public Project(final int id, final String name) {
     this.id = id;
     this.name = name;
   }
 
-  public String getName() {
-    return name;
+  public static Project projectFromObject(final Object object) {
+    final Map<String, Object> projectObject = (Map<String, Object>) object;
+    final int id = (Integer) projectObject.get("id");
+    final String name = (String) projectObject.get("name");
+    final String description = (String) projectObject.get("description");
+    final String lastModifiedUser = (String) projectObject.get("lastModifiedUser");
+    final long createTimestamp = coerceToLong(projectObject.get("createTimestamp"));
+    final long lastModifiedTimestamp =
+        coerceToLong(projectObject.get("lastModifiedTimestamp"));
+    final String source = (String) projectObject.get("source");
+    Boolean active = (Boolean) projectObject.get("active");
+    active = active == null ? true : active;
+    final int version = (Integer) projectObject.get("version");
+    final Map<String, Object> metadata =
+        (Map<String, Object>) projectObject.get("metadata");
+
+    final Project project = new Project(id, name);
+    project.setVersion(version);
+    project.setDescription(description);
+    project.setCreateTimestamp(createTimestamp);
+    project.setLastModifiedTimestamp(lastModifiedTimestamp);
+    project.setLastModifiedUser(lastModifiedUser);
+    project.setActive(active);
+
+    if (source != null) {
+      project.setSource(source);
+    }
+    if (metadata != null) {
+      project.setMetadata(metadata);
+    }
+
+    final List<String> proxyUserList = (List<String>) projectObject.get("proxyUsers");
+    project.addAllProxyUsers(proxyUserList);
+
+    return project;
   }
 
-  public void setFlows(Map<String, Flow> flows) {
-    this.flows = flows;
+  private static long coerceToLong(final Object obj) {
+    if (obj == null) {
+      return 0;
+    } else if (obj instanceof Integer) {
+      return (Integer) obj;
+    }
+
+    return (Long) obj;
   }
 
-  public Flow getFlow(String flowId) {
-    if (flows == null) {
+  public String getName() {
+    return this.name;
+  }
+
+  public Flow getFlow(final String flowId) {
+    if (this.flows == null) {
       return null;
     }
 
-    return flows.get(flowId);
+    return this.flows.get(flowId);
   }
 
   public Map<String, Flow> getFlowMap() {
-    return flows;
+    return this.flows;
   }
 
   public List<Flow> getFlows() {
     List<Flow> retFlow = null;
-    if (flows != null) {
-      retFlow = new ArrayList<Flow>(flows.values());
+    if (this.flows != null) {
+      retFlow = new ArrayList<>(this.flows.values());
     } else {
-      retFlow = new ArrayList<Flow>();
+      retFlow = new ArrayList<>();
     }
     return retFlow;
   }
 
-  public Permission getCollectivePermission(User user) {
-    Permission permissions = new Permission();
-    Permission perm = userPermissionMap.get(user.getUserId());
+  public void setFlows(final Map<String, Flow> flows) {
+    this.flows = flows;
+  }
+
+  public Permission getCollectivePermission(final User user) {
+    final Permission permissions = new Permission();
+    Permission perm = this.userPermissionMap.get(user.getUserId());
     if (perm != null) {
       permissions.addPermissions(perm);
     }
 
-    for (String group : user.getGroups()) {
-      perm = groupPermissionMap.get(group);
+    for (final String group : user.getGroups()) {
+      perm = this.groupPermissionMap.get(group);
       if (perm != null) {
         permissions.addPermissions(perm);
       }
@@ -102,27 +149,27 @@ public class Project {
   }
 
   public Set<String> getProxyUsers() {
-    return new HashSet<String>(proxyUsers);
+    return new HashSet<>(this.proxyUsers);
   }
 
-  public void addAllProxyUsers(Collection<String> proxyUsers) {
+  public void addAllProxyUsers(final Collection<String> proxyUsers) {
     this.proxyUsers.addAll(proxyUsers);
   }
 
-  public boolean hasProxyUser(String proxy) {
+  public boolean hasProxyUser(final String proxy) {
     return this.proxyUsers.contains(proxy);
   }
 
-  public void addProxyUser(String user) {
+  public void addProxyUser(final String user) {
     this.proxyUsers.add(user);
   }
 
-  public void removeProxyUser(String user) {
+  public void removeProxyUser(final String user) {
     this.proxyUsers.remove(user);
   }
 
-  public boolean hasPermission(User user, Type type) {
-    Permission perm = userPermissionMap.get(user.getUserId());
+  public boolean hasPermission(final User user, final Type type) {
+    final Permission perm = this.userPermissionMap.get(user.getUserId());
     if (perm != null
         && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type))) {
       return true;
@@ -131,8 +178,8 @@ public class Project {
     return hasGroupPermission(user, type);
   }
 
-  public boolean hasUserPermission(User user, Type type) {
-    Permission perm = userPermissionMap.get(user.getUserId());
+  public boolean hasUserPermission(final User user, final Type type) {
+    final Permission perm = this.userPermissionMap.get(user.getUserId());
     if (perm == null) {
       // Check group
       return false;
@@ -145,9 +192,9 @@ public class Project {
     return false;
   }
 
-  public boolean hasGroupPermission(User user, Type type) {
-    for (String group : user.getGroups()) {
-      Permission perm = groupPermissionMap.get(group);
+  public boolean hasGroupPermission(final User user, final Type type) {
+    for (final String group : user.getGroups()) {
+      final Permission perm = this.groupPermissionMap.get(group);
       if (perm != null) {
         if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
           return true;
@@ -158,10 +205,10 @@ public class Project {
     return false;
   }
 
-  public List<String> getUsersWithPermission(Type type) {
-    ArrayList<String> users = new ArrayList<String>();
-    for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
-      Permission perm = entry.getValue();
+  public List<String> getUsersWithPermission(final Type type) {
+    final ArrayList<String> users = new ArrayList<>();
+    for (final Map.Entry<String, Permission> entry : this.userPermissionMap.entrySet()) {
+      final Permission perm = entry.getValue();
       if (perm.isPermissionSet(type)) {
         users.add(entry.getKey());
       }
@@ -170,11 +217,11 @@ public class Project {
   }
 
   public List<Pair<String, Permission>> getUserPermissions() {
-    ArrayList<Pair<String, Permission>> permissions =
-        new ArrayList<Pair<String, Permission>>();
+    final ArrayList<Pair<String, Permission>> permissions =
+        new ArrayList<>();
 
-    for (Map.Entry<String, Permission> entry : userPermissionMap.entrySet()) {
-      permissions.add(new Pair<String, Permission>(entry.getKey(), entry
+    for (final Map.Entry<String, Permission> entry : this.userPermissionMap.entrySet()) {
+      permissions.add(new Pair<>(entry.getKey(), entry
           .getValue()));
     }
 
@@ -182,154 +229,106 @@ public class Project {
   }
 
   public List<Pair<String, Permission>> getGroupPermissions() {
-    ArrayList<Pair<String, Permission>> permissions =
-        new ArrayList<Pair<String, Permission>>();
+    final ArrayList<Pair<String, Permission>> permissions =
+        new ArrayList<>();
 
-    for (Map.Entry<String, Permission> entry : groupPermissionMap.entrySet()) {
-      permissions.add(new Pair<String, Permission>(entry.getKey(), entry
+    for (final Map.Entry<String, Permission> entry : this.groupPermissionMap.entrySet()) {
+      permissions.add(new Pair<>(entry.getKey(), entry
           .getValue()));
     }
 
     return permissions;
   }
 
-  public void setDescription(String description) {
-    this.description = description;
+  public String getDescription() {
+    return this.description;
   }
 
-  public String getDescription() {
-    return description;
+  public void setDescription(final String description) {
+    this.description = description;
   }
 
-  public void setUserPermission(String userid, Permission perm) {
-    userPermissionMap.put(userid, perm);
+  public void setUserPermission(final String userid, final Permission perm) {
+    this.userPermissionMap.put(userid, perm);
   }
 
-  public void setGroupPermission(String group, Permission perm) {
-    groupPermissionMap.put(group, perm);
+  public void setGroupPermission(final String group, final Permission perm) {
+    this.groupPermissionMap.put(group, perm);
   }
 
-  public Permission getUserPermission(User user) {
-    return userPermissionMap.get(user.getUserId());
+  public Permission getUserPermission(final User user) {
+    return this.userPermissionMap.get(user.getUserId());
   }
 
-  public Permission getGroupPermission(String group) {
-    return groupPermissionMap.get(group);
+  public Permission getGroupPermission(final String group) {
+    return this.groupPermissionMap.get(group);
   }
 
-  public Permission getUserPermission(String userID) {
-    return userPermissionMap.get(userID);
+  public Permission getUserPermission(final String userID) {
+    return this.userPermissionMap.get(userID);
   }
 
-  public void removeGroupPermission(String group) {
-    groupPermissionMap.remove(group);
+  public void removeGroupPermission(final String group) {
+    this.groupPermissionMap.remove(group);
   }
 
-  public void removeUserPermission(String userId) {
-    userPermissionMap.remove(userId);
+  public void removeUserPermission(final String userId) {
+    this.userPermissionMap.remove(userId);
   }
 
   public void clearUserPermission() {
-    userPermissionMap.clear();
+    this.userPermissionMap.clear();
   }
 
   public long getCreateTimestamp() {
-    return createTimestamp;
+    return this.createTimestamp;
   }
 
-  public void setCreateTimestamp(long createTimestamp) {
+  public void setCreateTimestamp(final long createTimestamp) {
     this.createTimestamp = createTimestamp;
   }
 
   public long getLastModifiedTimestamp() {
-    return lastModifiedTimestamp;
+    return this.lastModifiedTimestamp;
   }
 
-  public void setLastModifiedTimestamp(long lastModifiedTimestamp) {
+  public void setLastModifiedTimestamp(final long lastModifiedTimestamp) {
     this.lastModifiedTimestamp = lastModifiedTimestamp;
   }
 
   public Object toObject() {
-    HashMap<String, Object> projectObject = new HashMap<String, Object>();
-    projectObject.put("id", id);
-    projectObject.put("name", name);
-    projectObject.put("description", description);
-    projectObject.put("createTimestamp", createTimestamp);
-    projectObject.put("lastModifiedTimestamp", lastModifiedTimestamp);
-    projectObject.put("lastModifiedUser", lastModifiedUser);
-    projectObject.put("version", version);
-
-    if (!active) {
+    final HashMap<String, Object> projectObject = new HashMap<>();
+    projectObject.put("id", this.id);
+    projectObject.put("name", this.name);
+    projectObject.put("description", this.description);
+    projectObject.put("createTimestamp", this.createTimestamp);
+    projectObject.put("lastModifiedTimestamp", this.lastModifiedTimestamp);
+    projectObject.put("lastModifiedUser", this.lastModifiedUser);
+    projectObject.put("version", this.version);
+
+    if (!this.active) {
       projectObject.put("active", false);
     }
 
-    if (source != null) {
-      projectObject.put("source", source);
+    if (this.source != null) {
+      projectObject.put("source", this.source);
     }
 
-    if (metadata != null) {
-      projectObject.put("metadata", metadata);
+    if (this.metadata != null) {
+      projectObject.put("metadata", this.metadata);
     }
 
-    ArrayList<String> proxyUserList = new ArrayList<String>(proxyUsers);
+    final ArrayList<String> proxyUserList = new ArrayList<>(this.proxyUsers);
     projectObject.put("proxyUsers", proxyUserList);
 
     return projectObject;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Project projectFromObject(Object object) {
-    Map<String, Object> projectObject = (Map<String, Object>) object;
-    int id = (Integer) projectObject.get("id");
-    String name = (String) projectObject.get("name");
-    String description = (String) projectObject.get("description");
-    String lastModifiedUser = (String) projectObject.get("lastModifiedUser");
-    long createTimestamp = coerceToLong(projectObject.get("createTimestamp"));
-    long lastModifiedTimestamp =
-        coerceToLong(projectObject.get("lastModifiedTimestamp"));
-    String source = (String) projectObject.get("source");
-    Boolean active = (Boolean) projectObject.get("active");
-    active = active == null ? true : active;
-    int version = (Integer) projectObject.get("version");
-    Map<String, Object> metadata =
-        (Map<String, Object>) projectObject.get("metadata");
-
-    Project project = new Project(id, name);
-    project.setVersion(version);
-    project.setDescription(description);
-    project.setCreateTimestamp(createTimestamp);
-    project.setLastModifiedTimestamp(lastModifiedTimestamp);
-    project.setLastModifiedUser(lastModifiedUser);
-    project.setActive(active);
-
-    if (source != null) {
-      project.setSource(source);
-    }
-    if (metadata != null) {
-      project.setMetadata(metadata);
-    }
-
-    List<String> proxyUserList = (List<String>) projectObject.get("proxyUsers");
-    project.addAllProxyUsers(proxyUserList);
-
-    return project;
-  }
-
-  private static long coerceToLong(Object obj) {
-    if (obj == null) {
-      return 0;
-    } else if (obj instanceof Integer) {
-      return (Integer) obj;
-    }
-
-    return (Long) obj;
-  }
-
   public String getLastModifiedUser() {
-    return lastModifiedUser;
+    return this.lastModifiedUser;
   }
 
-  public void setLastModifiedUser(String lastModifiedUser) {
+  public void setLastModifiedUser(final String lastModifiedUser) {
     this.lastModifiedUser = lastModifiedUser;
   }
 
@@ -337,102 +336,118 @@ public class Project {
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + (active ? 1231 : 1237);
+    result = prime * result + (this.active ? 1231 : 1237);
     result =
-        prime * result + (int) (createTimestamp ^ (createTimestamp >>> 32));
+        prime * result + (int) (this.createTimestamp ^ (this.createTimestamp >>> 32));
     result =
-        prime * result + ((description == null) ? 0 : description.hashCode());
-    result = prime * result + id;
+        prime * result + ((this.description == null) ? 0 : this.description.hashCode());
+    result = prime * result + this.id;
     result =
         prime * result
-            + (int) (lastModifiedTimestamp ^ (lastModifiedTimestamp >>> 32));
+            + (int) (this.lastModifiedTimestamp ^ (this.lastModifiedTimestamp >>> 32));
     result =
         prime * result
-            + ((lastModifiedUser == null) ? 0 : lastModifiedUser.hashCode());
-    result = prime * result + ((name == null) ? 0 : name.hashCode());
-    result = prime * result + ((source == null) ? 0 : source.hashCode());
-    result = prime * result + version;
+            + ((this.lastModifiedUser == null) ? 0 : this.lastModifiedUser.hashCode());
+    result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
+    result = prime * result + ((this.source == null) ? 0 : this.source.hashCode());
+    result = prime * result + this.version;
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (getClass() != obj.getClass())
+    }
+    if (getClass() != obj.getClass()) {
       return false;
-    Project other = (Project) obj;
-    if (active != other.active)
+    }
+    final Project other = (Project) obj;
+    if (this.active != other.active) {
       return false;
-    if (createTimestamp != other.createTimestamp)
+    }
+    if (this.createTimestamp != other.createTimestamp) {
       return false;
-    if (description == null) {
-      if (other.description != null)
+    }
+    if (this.description == null) {
+      if (other.description != null) {
         return false;
-    } else if (!description.equals(other.description))
+      }
+    } else if (!this.description.equals(other.description)) {
       return false;
-    if (id != other.id)
+    }
+    if (this.id != other.id) {
       return false;
-    if (lastModifiedTimestamp != other.lastModifiedTimestamp)
+    }
+    if (this.lastModifiedTimestamp != other.lastModifiedTimestamp) {
       return false;
-    if (lastModifiedUser == null) {
-      if (other.lastModifiedUser != null)
+    }
+    if (this.lastModifiedUser == null) {
+      if (other.lastModifiedUser != null) {
         return false;
-    } else if (!lastModifiedUser.equals(other.lastModifiedUser))
+      }
+    } else if (!this.lastModifiedUser.equals(other.lastModifiedUser)) {
       return false;
-    if (name == null) {
-      if (other.name != null)
+    }
+    if (this.name == null) {
+      if (other.name != null) {
         return false;
-    } else if (!name.equals(other.name))
+      }
+    } else if (!this.name.equals(other.name)) {
       return false;
-    if (source == null) {
-      if (other.source != null)
+    }
+    if (this.source == null) {
+      if (other.source != null) {
         return false;
-    } else if (!source.equals(other.source))
+      }
+    } else if (!this.source.equals(other.source)) {
       return false;
-    if (version != other.version)
+    }
+    if (this.version != other.version) {
       return false;
+    }
     return true;
   }
 
   public String getSource() {
-    return source;
+    return this.source;
   }
 
-  public void setSource(String source) {
+  public void setSource(final String source) {
     this.source = source;
   }
 
   public Map<String, Object> getMetadata() {
-    if (metadata == null) {
-      metadata = new HashMap<String, Object>();
+    if (this.metadata == null) {
+      this.metadata = new HashMap<>();
     }
-    return metadata;
+    return this.metadata;
   }
 
-  protected void setMetadata(Map<String, Object> metadata) {
+  protected void setMetadata(final Map<String, Object> metadata) {
     this.metadata = metadata;
   }
 
   public int getId() {
-    return id;
+    return this.id;
   }
 
   public boolean isActive() {
-    return active;
+    return this.active;
   }
 
-  public void setActive(boolean active) {
+  public void setActive(final boolean active) {
     this.active = active;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
-  public void setVersion(int version) {
+  public void setVersion(final int version) {
     this.version = version;
   }
 }
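
Project's reformatted API is easiest to see end to end: build an instance, grant a permission, serialize it with toObject(), and rebuild it with projectFromObject(). A hedged sketch of that round trip, assuming azkaban.user.User can be constructed from a bare user id (that constructor is not part of this diff):

import azkaban.project.Project;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;

public class ProjectRoundTripSketch {

  public static void main(final String[] args) {
    final Project project = new Project(42, "demo-project");
    project.setDescription("Example project used to exercise the API");
    project.setUserPermission("alice", new Permission(Type.ADMIN));
    project.addProxyUser("proxy-service");

    // Serialize to the generic map form and rebuild a Project from it.
    // Permissions are not part of the serialized form produced by toObject().
    final Object serialized = project.toObject();
    final Project restored = Project.projectFromObject(serialized);
    System.out.println(restored.getName() + " v" + restored.getVersion());

    // Permission checks consult the user map first, then fall back to groups;
    // an ADMIN entry satisfies any requested type.
    final User alice = new User("alice"); // assumed single-argument constructor
    System.out.println(project.hasPermission(alice, Type.READ)); // true
  }
}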
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectFileHandler.java b/azkaban-common/src/main/java/azkaban/project/ProjectFileHandler.java
index b1edeb0..f56aef3 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectFileHandler.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectFileHandler.java
@@ -19,6 +19,7 @@ package azkaban.project;
 import java.io.File;
 
 public class ProjectFileHandler {
+
   private final int projectId;
   private final int version;
   private final long uploadTime;
@@ -32,15 +33,15 @@ public class ProjectFileHandler {
   private File localFile = null;
 
   public ProjectFileHandler(
-      int projectId,
-      int version,
-      long uploadTime,
-      String uploader,
-      String fileType,
-      String fileName,
-      int numChunks,
-      byte[] md5Hash,
-      String resourceId) {
+      final int projectId,
+      final int version,
+      final long uploadTime,
+      final String uploader,
+      final String fileType,
+      final String fileName,
+      final int numChunks,
+      final byte[] md5Hash,
+      final String resourceId) {
     this.projectId = projectId;
     this.version = version;
     this.uploadTime = uploadTime;
@@ -53,53 +54,53 @@ public class ProjectFileHandler {
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
   public long getUploadTime() {
-    return uploadTime;
+    return this.uploadTime;
   }
 
   public String getFileType() {
-    return fileType;
+    return this.fileType;
   }
 
   public String getFileName() {
-    return fileName;
+    return this.fileName;
   }
 
   public byte[] getMd5Hash() {
-    return md5Hash;
+    return this.md5Hash;
   }
 
   public File getLocalFile() {
-    return localFile;
+    return this.localFile;
   }
 
-  public synchronized void setLocalFile(File localFile) {
+  public synchronized void setLocalFile(final File localFile) {
     this.localFile = localFile;
   }
 
   public synchronized void deleteLocalFile() {
-    if (localFile != null) {
-      localFile.delete();
-      localFile = null;
+    if (this.localFile != null) {
+      this.localFile.delete();
+      this.localFile = null;
     }
   }
 
   public String getUploader() {
-    return uploader;
+    return this.uploader;
   }
 
   public int getNumChunks() {
-    return numChunks;
+    return this.numChunks;
   }
 
   public String getResourceId() {
-    return resourceId;
+    return this.resourceId;
   }
 }
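
ProjectFileHandler is a plain value object around one uploaded archive version; callers attach a local copy of the file, read it, then delete the temporary copy. A hedged sketch of that lifecycle, with illustrative values only:

import azkaban.project.ProjectFileHandler;
import java.io.File;
import java.io.IOException;

public class ProjectFileHandlerSketch {

  public static void main(final String[] args) throws IOException {
    final ProjectFileHandler handler = new ProjectFileHandler(
        42,                         // projectId (illustrative)
        7,                          // version
        System.currentTimeMillis(), // uploadTime
        "alice",                    // uploader
        "zip",                      // fileType
        "demo-project.zip",         // fileName
        1,                          // numChunks
        new byte[16],               // md5Hash placeholder
        "demo-resource-id");        // resourceId (illustrative)

    // Attach a local copy, use it, then clean up the temporary file.
    final File tempCopy = File.createTempFile("demo-project", ".zip");
    handler.setLocalFile(tempCopy);
    System.out.println(handler.getFileName() + " v" + handler.getVersion());
    handler.deleteLocalFile(); // deletes the temp file and clears the reference
  }
}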
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectLoader.java b/azkaban-common/src/main/java/azkaban/project/ProjectLoader.java
index e1142ed..84d320e 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectLoader.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectLoader.java
@@ -16,42 +16,31 @@
 
 package azkaban.project;
 
-import java.io.File;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-
 import azkaban.flow.Flow;
 import azkaban.project.ProjectLogEvent.EventType;
 import azkaban.user.Permission;
 import azkaban.user.User;
 import azkaban.utils.Props;
 import azkaban.utils.Triple;
+import java.io.File;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
 
 public interface ProjectLoader {
 
   /**
    * Returns all projects which are active
-   *
-   * @return
-   * @throws ProjectManagerException
    */
   List<Project> fetchAllActiveProjects() throws ProjectManagerException;
 
   /**
    * Loads whole project, including permissions, by the project id.
-   *
-   * @param id
-   * @return
-   * @throws ProjectManagerException
    */
   Project fetchProjectById(int id) throws ProjectManagerException;
 
   /**
    * Loads whole project, including permissions, by the project name.
-   * @param name
-   * @return
-   * @throws ProjectManagerException
    */
   Project fetchProjectByName(String name) throws ProjectManagerException;
 
@@ -64,19 +53,13 @@ public interface ProjectLoader {
    * If the name and description of the project exceed the store's constraints,
    * it will throw an exception.
    *
-   * @param name
-   * @return
-   * @throws ProjectManagerException if an active project of the same name
-   *           exists.
+   * @throws ProjectManagerException if an active project of the same name exists.
    */
   Project createNewProject(String name, String description, User creator)
       throws ProjectManagerException;
 
   /**
    * Removes the project by marking it inactive.
-   *
-   * @param project
-   * @throws ProjectManagerException
    */
   void removeProject(Project project, String user)
       throws ProjectManagerException;
@@ -85,12 +68,6 @@ public interface ProjectLoader {
    * Adds and updates the user permissions. Does not check if the user is valid.
    * If the permission doesn't exist, it is added; if it already exists, it is
    * updated.
-   *
-   * @param project
-   * @param name
-   * @param perm
-   * @param isGroup
-   * @throws ProjectManagerException
    */
   void updatePermission(Project project, String name, Permission perm,
       boolean isGroup) throws ProjectManagerException;
@@ -100,10 +77,6 @@ public interface ProjectLoader {
 
   /**
    * Modifies and commits the project description.
-   *
-   * @param project
-   * @param description
-   * @throws ProjectManagerException
    */
   void updateDescription(Project project, String description, String user)
       throws ProjectManagerException;
@@ -112,8 +85,6 @@ public interface ProjectLoader {
    * Stores logs for a particular project. Will soft fail rather than throw an
    * exception.
    *
-   * @param project
-   * @param type
    * @param message the event message. Returns true if the posting was a success.
    */
   boolean postEvent(Project project, EventType type, String user,
@@ -121,9 +92,6 @@ public interface ProjectLoader {
 
   /**
    * Returns all the events for a project sorted
-   *
-   * @param project
-   * @return
    */
   List<ProjectLogEvent> getProjectEvents(Project project, int num,
       int skip) throws ProjectManagerException;
@@ -135,18 +103,11 @@ public interface ProjectLoader {
       throws ProjectManagerException;
 
   /**
-   * Add project and version info to the project_versions table. This current maintains the metadata for each uploaded
-   * version of the project
-   *
-   * @param projectId
-   * @param version
-   * @param localFile
-   * @param uploader
-   * @param md5
-   * @param resourceId
-   * @throws ProjectManagerException
+   * Add project and version info to the project_versions table. This currently maintains the
+   * metadata for each uploaded version of the project.
    */
-  void addProjectVersion(int projectId, int version, File localFile, String uploader, byte[] md5, String resourceId)
+  void addProjectVersion(int projectId, int version, File localFile, String uploader, byte[] md5,
+      String resourceId)
       throws ProjectManagerException;
 
   /**
@@ -160,18 +121,12 @@ public interface ProjectLoader {
 
   /**
    * Get file that's uploaded.
-   *
-   * @return
    */
   ProjectFileHandler getUploadedFile(int projectId, int version)
       throws ProjectManagerException;
 
   /**
    * Changes and commits different project version.
-   *
-   * @param project
-   * @param version
-   * @throws ProjectManagerException
    */
   void changeProjectVersion(Project project, int version, String user)
       throws ProjectManagerException;
@@ -181,44 +136,24 @@ public interface ProjectLoader {
 
   /**
    * Uploads all computed flows
-   *
-   * @param project
-   * @param version
-   * @param flows
-   * @throws ProjectManagerException
    */
   void uploadFlows(Project project, int version, Collection<Flow> flows)
       throws ProjectManagerException;
 
   /**
    * Upload just one flow.
-   *
-   * @param project
-   * @param version
-   * @param flow
-   * @throws ProjectManagerException
    */
   void uploadFlow(Project project, int version, Flow flow)
       throws ProjectManagerException;
 
   /**
    * Fetches one particular flow.
-   *
-   * @param project
-   * @param version
-   * @param flowId
-   * @throws ProjectManagerException
    */
   Flow fetchFlow(Project project, String flowId)
       throws ProjectManagerException;
 
   /**
    * Fetches all flows.
-   *
-   * @param project
-   * @param version
-   * @param flowId
-   * @throws ProjectManagerException
    */
   List<Flow> fetchAllProjectFlows(Project project)
       throws ProjectManagerException;
@@ -231,53 +166,30 @@ public interface ProjectLoader {
 
   /**
    * Upload Project properties
-   *
-   * @param project
-   * @param path
-   * @param properties
-   * @throws ProjectManagerException
    */
   void uploadProjectProperty(Project project, Props props)
       throws ProjectManagerException;
 
   /**
    * Upload project properties. The map contains key/value pairs of path and properties.
-   *
-   * @param project
-   * @param path
-   * @param properties
-   * @throws ProjectManagerException
    */
   void uploadProjectProperties(Project project, List<Props> properties)
       throws ProjectManagerException;
 
   /**
    * Fetch project properties
-   *
-   * @param project
-   * @param propsName
-   * @return
-   * @throws ProjectManagerException
    */
   Props fetchProjectProperty(Project project, String propsName)
       throws ProjectManagerException;
 
   /**
    * Fetch all project properties
-   *
-   * @param project
-   * @return
-   * @throws ProjectManagerException
    */
   Map<String, Props> fetchProjectProperties(int projectId, int version)
       throws ProjectManagerException;
 
   /**
    * Cleans all project versions less than the given version.
-   *
-   * @param projectId
-   * @param version
-   * @throws ProjectManagerException
    */
   void cleanOlderProjectVersion(int projectId, int version)
       throws ProjectManagerException;
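
ProjectLoader is the persistence seam that ProjectManager drives. A hedged sketch of a typical consumer, using only methods declared above: fetch a project by name, then attach its flows keyed by flow id, mirroring how ProjectManager populates a Project after reading it from the store.

import azkaban.flow.Flow;
import azkaban.project.Project;
import azkaban.project.ProjectLoader;
import azkaban.project.ProjectManagerException;
import java.util.HashMap;
import java.util.Map;

public class ProjectLoaderSketch {

  // Fetch the project, then load and attach its flows so callers can resolve
  // a flow by id via Project.getFlow(flowId).
  static Project loadWithFlows(final ProjectLoader loader, final String name)
      throws ProjectManagerException {
    final Project project = loader.fetchProjectByName(name);
    final Map<String, Flow> flowMap = new HashMap<>();
    for (final Flow flow : loader.fetchAllProjectFlows(project)) {
      flowMap.put(flow.getId(), flow);
    }
    project.setFlows(flowMap);
    return project;
  }
}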
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectLogEvent.java b/azkaban-common/src/main/java/azkaban/project/ProjectLogEvent.java
index 451e402..57b2886 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectLogEvent.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectLogEvent.java
@@ -17,6 +17,43 @@
 package azkaban.project;
 
 public class ProjectLogEvent {
+
+  private final int projectId;
+  private final String user;
+  private final long time;
+  private final EventType type;
+  private final String message;
+
+  public ProjectLogEvent(final int projectId, final EventType type, final long time,
+      final String user,
+      final String message) {
+    this.projectId = projectId;
+    this.user = user;
+    this.time = time;
+    this.type = type;
+    this.message = message;
+  }
+
+  public int getProjectId() {
+    return this.projectId;
+  }
+
+  public String getUser() {
+    return this.user;
+  }
+
+  public long getTime() {
+    return this.time;
+  }
+
+  public EventType getType() {
+    return this.type;
+  }
+
+  public String getMessage() {
+    return this.message;
+  }
+
   /**
    * Log event type messages. Do not change the numeric representation of each enum.
    *
@@ -38,15 +75,11 @@ public class ProjectLogEvent {
 
     private final int numVal;
 
-    EventType(int numVal) {
+    EventType(final int numVal) {
       this.numVal = numVal;
     }
 
-    public int getNumVal() {
-      return numVal;
-    }
-
-    public static EventType fromInteger(int x) {
+    public static EventType fromInteger(final int x) {
       switch (x) {
         case 1:
           return CREATED;
@@ -76,41 +109,10 @@ public class ProjectLogEvent {
           return ERROR;
       }
     }
-  }
-
-  private final int projectId;
-  private final String user;
-  private final long time;
-  private final EventType type;
-  private final String message;
-
-  public ProjectLogEvent(int projectId, EventType type, long time, String user,
-      String message) {
-    this.projectId = projectId;
-    this.user = user;
-    this.time = time;
-    this.type = type;
-    this.message = message;
-  }
 
-  public int getProjectId() {
-    return projectId;
-  }
-
-  public String getUser() {
-    return user;
-  }
-
-  public long getTime() {
-    return time;
-  }
-
-  public EventType getType() {
-    return type;
-  }
-
-  public String getMessage() {
-    return message;
+    public int getNumVal() {
+      return this.numVal;
+    }
   }
 
 }
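
ProjectLogEvent pins each event type to a fixed integer so the value stored in the events table stays stable across releases. A hedged sketch of the numeric round trip through getNumVal()/fromInteger(); only CREATED = 1 and the ERROR fallback are visible in this hunk, so treat the other mappings as assumptions:

import azkaban.project.ProjectLogEvent;
import azkaban.project.ProjectLogEvent.EventType;

public class ProjectLogEventSketch {

  public static void main(final String[] args) {
    final ProjectLogEvent event = new ProjectLogEvent(
        42, EventType.CREATED, System.currentTimeMillis(), "alice", "created from sketch");

    // The numeric value is what gets persisted; fromInteger restores the enum.
    final int stored = event.getType().getNumVal();
    System.out.println(stored + " -> " + EventType.fromInteger(stored)); // 1 -> CREATED

    // Values outside the switch are expected to fall back to ERROR.
    System.out.println(EventType.fromInteger(9999));
  }
}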
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectManager.java b/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
index da5c351..15c1ce0 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectManager.java
@@ -16,21 +16,7 @@
 
 package azkaban.project;
 
-import azkaban.storage.StorageManager;
-import com.google.inject.Inject;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
-import java.util.zip.ZipFile;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
+import static java.util.Objects.requireNonNull;
 
 import azkaban.flow.Flow;
 import azkaban.project.ProjectLogEvent.EventType;
@@ -39,17 +25,30 @@ import azkaban.project.validator.ValidationStatus;
 import azkaban.project.validator.ValidatorConfigs;
 import azkaban.project.validator.ValidatorManager;
 import azkaban.project.validator.XmlValidatorManager;
+import azkaban.storage.StorageManager;
 import azkaban.user.Permission;
 import azkaban.user.Permission.Type;
 import azkaban.user.User;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
 import azkaban.utils.Utils;
-
-import static java.util.Objects.*;
+import com.google.inject.Inject;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+import java.util.zip.ZipFile;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
 
 
 public class ProjectManager {
+
   private static final Logger logger = Logger.getLogger(ProjectManager.class);
   private final ProjectLoader projectLoader;
   private final StorageManager storageManager;
@@ -59,7 +58,8 @@ public class ProjectManager {
   private final boolean creatorDefaultPermissions;
 
   @Inject
-  public ProjectManager(ProjectLoader loader, StorageManager storageManager, Props props) {
+  public ProjectManager(final ProjectLoader loader, final StorageManager storageManager,
+      final Props props) {
     this.projectLoader = requireNonNull(loader);
     this.storageManager = requireNonNull(storageManager);
     this.props = requireNonNull(props);
@@ -68,20 +68,20 @@ public class ProjectManager {
     this.projectVersionRetention =
         (props.getInt("project.version.retention", 3));
     logger.info("Project version retention is set to "
-        + projectVersionRetention);
+        + this.projectVersionRetention);
 
     this.creatorDefaultPermissions =
         props.getBoolean("creator.default.proxy", true);
 
-    if (!tempDir.exists()) {
-      tempDir.mkdirs();
+    if (!this.tempDir.exists()) {
+      this.tempDir.mkdirs();
     }
 
     // The prop passed to XmlValidatorManager is used to initialize all the
     // validators
     // Each validator will take certain key/value pairs from the prop to
     // initialize itself.
-    Props prop = new Props(props);
+    final Props prop = new Props(props);
     prop.put(ValidatorConfigs.PROJECT_ARCHIVE_FILE_PATH, "initialize");
     // By instantiating an object of XmlValidatorManager, this will verify the
     // config files for the validators.
@@ -89,41 +89,41 @@ public class ProjectManager {
     loadProjectWhiteList();
   }
 
-  public void loadAllProjectFlows(Project project) {
+  public void loadAllProjectFlows(final Project project) {
     try {
-      List<Flow> flows = projectLoader.fetchAllProjectFlows(project);
-      Map<String, Flow> flowMap = new HashMap<String, Flow>();
-      for (Flow flow : flows) {
+      final List<Flow> flows = this.projectLoader.fetchAllProjectFlows(project);
+      final Map<String, Flow> flowMap = new HashMap<>();
+      for (final Flow flow : flows) {
         flowMap.put(flow.getId(), flow);
       }
 
       project.setFlows(flowMap);
-    } catch (ProjectManagerException e) {
+    } catch (final ProjectManagerException e) {
       throw new RuntimeException("Could not load projects flows from store.", e);
     }
   }
 
   public Props getProps() {
-    return props;
+    return this.props;
   }
 
-  public List<Project> getUserProjects(User user) {
-    ArrayList<Project> userProjects = new ArrayList<>();
-    for (Project project : getProjects()) {
-      Permission perm = project.getUserPermission(user);
+  public List<Project> getUserProjects(final User user) {
+    final ArrayList<Project> userProjects = new ArrayList<>();
+    for (final Project project : getProjects()) {
+      final Permission perm = project.getUserPermission(user);
 
       if (perm != null
           && (perm.isPermissionSet(Type.ADMIN) || perm
-              .isPermissionSet(Type.READ))) {
+          .isPermissionSet(Type.READ))) {
         userProjects.add(project);
       }
     }
     return userProjects;
   }
 
-  public List<Project> getGroupProjects(User user) {
-    List<Project> groupProjects = new ArrayList<>();
-    for (Project project : getProjects()) {
+  public List<Project> getGroupProjects(final User user) {
+    final List<Project> groupProjects = new ArrayList<>();
+    for (final Project project : getProjects()) {
       if (project.hasGroupPermission(user, Type.READ)) {
         groupProjects.add(project);
       }
@@ -131,21 +131,21 @@ public class ProjectManager {
     return groupProjects;
   }
 
-  public List<Project> getUserProjectsByRegex(User user, String regexPattern) {
-    List<Project> userProjects = new ArrayList<>();
-    Pattern pattern;
+  public List<Project> getUserProjectsByRegex(final User user, final String regexPattern) {
+    final List<Project> userProjects = new ArrayList<>();
+    final Pattern pattern;
     try {
       pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
-    } catch (PatternSyntaxException e) {
+    } catch (final PatternSyntaxException e) {
       logger.error("Bad regex pattern " + regexPattern);
       return userProjects;
     }
-    for (Project project : getProjects()) {
-      Permission perm = project.getUserPermission(user);
+    for (final Project project : getProjects()) {
+      final Permission perm = project.getUserPermission(user);
 
       if (perm != null
           && (perm.isPermissionSet(Type.ADMIN) || perm
-              .isPermissionSet(Type.READ))) {
+          .isPermissionSet(Type.READ))) {
         if (pattern.matcher(project.getName()).find()) {
           userProjects.add(project);
         }
@@ -155,25 +155,25 @@ public class ProjectManager {
   }
 
   public List<Project> getProjects() {
-    List<Project> projects;
+    final List<Project> projects;
     try {
-      projects = projectLoader.fetchAllActiveProjects();
-    } catch (ProjectManagerException e) {
+      projects = this.projectLoader.fetchAllActiveProjects();
+    } catch (final ProjectManagerException e) {
       throw new RuntimeException("Could not load projects from store.", e);
     }
     return projects;
   }
 
-  public List<Project> getProjectsByRegex(String regexPattern) {
-    List<Project> allProjects = new ArrayList<Project>();
-    Pattern pattern;
+  public List<Project> getProjectsByRegex(final String regexPattern) {
+    final List<Project> allProjects = new ArrayList<>();
+    final Pattern pattern;
     try {
       pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE);
-    } catch (PatternSyntaxException e) {
+    } catch (final PatternSyntaxException e) {
       logger.error("Bad regex pattern " + regexPattern);
       return allProjects;
     }
-    for (Project project : getProjects()) {
+    for (final Project project : getProjects()) {
       if (pattern.matcher(project.getName()).find()) {
         allProjects.add(project);
       }
@@ -181,51 +181,43 @@ public class ProjectManager {
     return allProjects;
   }
 
-    /**
-     * Checks if a project is active using project_id
-     *
-     * @param id
-     */
-    public Boolean isActiveProject(int id) {
-      return getProject(id) != null;
-    }
+  /**
+   * Checks if a project is active using project_id
+   */
+  public Boolean isActiveProject(final int id) {
+    return getProject(id) != null;
+  }
 
-    /**
-     * fetch active project (boolean active = true) from DB by project_name
-     *
-     * @param name
-     * @return
-     */
-    public Project getProject(String name) {
-        Project fetchedProject = null;
-        try {
-            fetchedProject = projectLoader.fetchProjectByName(name);
-            loadAllProjectFlows(fetchedProject);
-        } catch (ProjectManagerException e) {
-            logger.error("Could not load project" + name + " from store.", e);
-        }
-        return fetchedProject;
+  /**
+   * fetch active project (boolean active = true) from DB by project_name
+   */
+  public Project getProject(final String name) {
+    Project fetchedProject = null;
+    try {
+      fetchedProject = this.projectLoader.fetchProjectByName(name);
+      loadAllProjectFlows(fetchedProject);
+    } catch (final ProjectManagerException e) {
+      logger.error("Could not load project" + name + " from store.", e);
     }
+    return fetchedProject;
+  }
 
-    /**
-     * fetch active project (boolean active = true) from DB by project_id
-     *
-     * @param id
-     * @return
-     */
-    public Project getProject(int id) {
-        Project fetchedProject = null;
-        try {
-            fetchedProject = projectLoader.fetchProjectById(id);
-            loadAllProjectFlows(fetchedProject);
-        } catch (ProjectManagerException e) {
-            logger.error("Could not load project" + id + " from store.", e);
-        }
-        return fetchedProject;
+  /**
+   * fetch active project (boolean active = true) from DB by project_id
+   */
+  public Project getProject(final int id) {
+    Project fetchedProject = null;
+    try {
+      fetchedProject = this.projectLoader.fetchProjectById(id);
+      loadAllProjectFlows(fetchedProject);
+    } catch (final ProjectManagerException e) {
+      logger.error("Could not load project" + id + " from store.", e);
     }
+    return fetchedProject;
+  }
 
-  public Project createProject(String projectName, String description,
-      User creator) throws ProjectManagerException {
+  public Project createProject(final String projectName, final String description,
+      final User creator) throws ProjectManagerException {
     if (projectName == null || projectName.trim().isEmpty()) {
       throw new ProjectManagerException("Project name cannot be empty.");
     } else if (description == null || description.trim().isEmpty()) {
@@ -239,156 +231,152 @@ public class ProjectManager {
 
     logger.info("Trying to create " + projectName + " by user "
         + creator.getUserId());
-    Project newProject =
-        projectLoader.createNewProject(projectName, description, creator);
+    final Project newProject =
+        this.projectLoader.createNewProject(projectName, description, creator);
 
-    if (creatorDefaultPermissions) {
+    if (this.creatorDefaultPermissions) {
       // Add permission to project
-      projectLoader.updatePermission(newProject, creator.getUserId(),
+      this.projectLoader.updatePermission(newProject, creator.getUserId(),
           new Permission(Permission.Type.ADMIN), false);
 
       // Add proxy user
       newProject.addProxyUser(creator.getUserId());
       try {
         updateProjectSetting(newProject);
-      } catch (ProjectManagerException e) {
+      } catch (final ProjectManagerException e) {
         e.printStackTrace();
         throw e;
       }
     }
 
-    projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(),
+    this.projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(),
         null);
 
     return newProject;
   }
 
-    /**
-     * Permanently delete all project files and properties data for all versions
-     * of a project and log event in project_events table
-     *
-     * @param project
-     * @param deleter
-     * @return
-     * @throws ProjectManagerException
-     */
-    public synchronized Project purgeProject(Project project, User deleter)
-        throws ProjectManagerException {
-        projectLoader.cleanOlderProjectVersion(project.getId(),
-            project.getVersion() + 1);
-        projectLoader
-            .postEvent(project, EventType.PURGE, deleter.getUserId(), String
-                .format("Purged versions before %d", project.getVersion() + 1));
-        return project;
-    }
+  /**
+   * Permanently deletes all project files and properties data for all versions
+   * of a project and logs the event in the project_events table.
+   */
+  public synchronized Project purgeProject(final Project project, final User deleter)
+      throws ProjectManagerException {
+    this.projectLoader.cleanOlderProjectVersion(project.getId(),
+        project.getVersion() + 1);
+    this.projectLoader
+        .postEvent(project, EventType.PURGE, deleter.getUserId(), String
+            .format("Purged versions before %d", project.getVersion() + 1));
+    return project;
+  }
 
-  public synchronized Project removeProject(Project project, User deleter)
+  public synchronized Project removeProject(final Project project, final User deleter)
       throws ProjectManagerException {
-    projectLoader.removeProject(project, deleter.getUserId());
-    projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(),
+    this.projectLoader.removeProject(project, deleter.getUserId());
+    this.projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(),
         null);
     return project;
   }
 
-  public void updateProjectDescription(Project project, String description,
-      User modifier) throws ProjectManagerException {
-    projectLoader.updateDescription(project, description, modifier.getUserId());
-    projectLoader.postEvent(project, EventType.DESCRIPTION,
+  public void updateProjectDescription(final Project project, final String description,
+      final User modifier) throws ProjectManagerException {
+    this.projectLoader.updateDescription(project, description, modifier.getUserId());
+    this.projectLoader.postEvent(project, EventType.DESCRIPTION,
         modifier.getUserId(), "Description changed to " + description);
   }
 
-  public List<ProjectLogEvent> getProjectEventLogs(Project project,
-      int results, int skip) throws ProjectManagerException {
-    return projectLoader.getProjectEvents(project, results, skip);
+  public List<ProjectLogEvent> getProjectEventLogs(final Project project,
+      final int results, final int skip) throws ProjectManagerException {
+    return this.projectLoader.getProjectEvents(project, results, skip);
   }
 
-  public Props getProperties(Project project, String source)
+  public Props getProperties(final Project project, final String source)
       throws ProjectManagerException {
-    return projectLoader.fetchProjectProperty(project, source);
+    return this.projectLoader.fetchProjectProperty(project, source);
   }
 
-  public Props getJobOverrideProperty(Project project, String jobName)
+  public Props getJobOverrideProperty(final Project project, final String jobName)
       throws ProjectManagerException {
-    return projectLoader.fetchProjectProperty(project, jobName + ".jor");
+    return this.projectLoader.fetchProjectProperty(project, jobName + ".jor");
   }
 
-  public void setJobOverrideProperty(Project project, Props prop, String jobName, User modifier)
+  public void setJobOverrideProperty(final Project project, final Props prop, final String jobName,
+      final User modifier)
       throws ProjectManagerException {
     prop.setSource(jobName + ".jor");
-    Props oldProps =
-        projectLoader.fetchProjectProperty(project, prop.getSource());
+    final Props oldProps =
+        this.projectLoader.fetchProjectProperty(project, prop.getSource());
 
     if (oldProps == null) {
-      projectLoader.uploadProjectProperty(project, prop);
+      this.projectLoader.uploadProjectProperty(project, prop);
     } else {
-      projectLoader.updateProjectProperty(project, prop);
+      this.projectLoader.updateProjectProperty(project, prop);
     }
 
-    String diffMessage = PropsUtils.getPropertyDiff(oldProps, prop);
+    final String diffMessage = PropsUtils.getPropertyDiff(oldProps, prop);
 
-    projectLoader.postEvent(project, EventType.PROPERTY_OVERRIDE,
+    this.projectLoader.postEvent(project, EventType.PROPERTY_OVERRIDE,
         modifier.getUserId(), diffMessage);
     return;
   }
 
-  public void updateProjectSetting(Project project)
+  public void updateProjectSetting(final Project project)
       throws ProjectManagerException {
-    projectLoader.updateProjectSettings(project);
+    this.projectLoader.updateProjectSettings(project);
   }
 
-  public void addProjectProxyUser(Project project, String proxyName,
-      User modifier) throws ProjectManagerException {
+  public void addProjectProxyUser(final Project project, final String proxyName,
+      final User modifier) throws ProjectManagerException {
     logger.info("User " + modifier.getUserId() + " adding proxy user "
         + proxyName + " to project " + project.getName());
     project.addProxyUser(proxyName);
 
-    projectLoader.postEvent(project, EventType.PROXY_USER,
+    this.projectLoader.postEvent(project, EventType.PROXY_USER,
         modifier.getUserId(), "Proxy user " + proxyName
             + " is added to project.");
     updateProjectSetting(project);
   }
 
-  public void removeProjectProxyUser(Project project, String proxyName,
-      User modifier) throws ProjectManagerException {
+  public void removeProjectProxyUser(final Project project, final String proxyName,
+      final User modifier) throws ProjectManagerException {
     logger.info("User " + modifier.getUserId() + " removing proxy user "
         + proxyName + " from project " + project.getName());
     project.removeProxyUser(proxyName);
 
-    projectLoader.postEvent(project, EventType.PROXY_USER,
+    this.projectLoader.postEvent(project, EventType.PROXY_USER,
         modifier.getUserId(), "Proxy user " + proxyName
             + " has been removed form the project.");
     updateProjectSetting(project);
   }
 
-  public void updateProjectPermission(Project project, String name,
-      Permission perm, boolean group, User modifier)
+  public void updateProjectPermission(final Project project, final String name,
+      final Permission perm, final boolean group, final User modifier)
       throws ProjectManagerException {
     logger.info("User " + modifier.getUserId()
         + " updating permissions for project " + project.getName() + " for "
         + name + " " + perm.toString());
-    projectLoader.updatePermission(project, name, perm, group);
+    this.projectLoader.updatePermission(project, name, perm, group);
     if (group) {
-      projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
+      this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
           modifier.getUserId(), "Permission for group " + name + " set to "
               + perm.toString());
     } else {
-      projectLoader.postEvent(project, EventType.USER_PERMISSION,
+      this.projectLoader.postEvent(project, EventType.USER_PERMISSION,
           modifier.getUserId(), "Permission for user " + name + " set to "
               + perm.toString());
     }
   }
 
-  public void removeProjectPermission(Project project, String name,
-      boolean group, User modifier) throws ProjectManagerException {
+  public void removeProjectPermission(final Project project, final String name,
+      final boolean group, final User modifier) throws ProjectManagerException {
     logger.info("User " + modifier.getUserId()
         + " removing permissions for project " + project.getName() + " for "
         + name);
-    projectLoader.removePermission(project, name, group);
+    this.projectLoader.removePermission(project, name, group);
     if (group) {
-      projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
+      this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION,
           modifier.getUserId(), "Permission for group " + name + " removed.");
     } else {
-      projectLoader.postEvent(project, EventType.USER_PERMISSION,
+      this.projectLoader.postEvent(project, EventType.USER_PERMISSION,
           modifier.getUserId(), "Permission for user " + name + " removed.");
     }
   }
@@ -401,23 +389,21 @@ public class ProjectManager {
    * {@link ProjectFileHandler#deleteLocalFile()}
    * to delete the temporary file.
    *
-   * @param project
    * @param version - latest version is used if value is -1
-   * @return ProjectFileHandler - null if can't find project zip file based on
-   *         project name and version
-   * @throws ProjectManagerException
+   * @return ProjectFileHandler - null if the project zip file cannot be found for the given
+   * project name and version
    */
-  public ProjectFileHandler getProjectFileHandler(Project project, int version)
+  public ProjectFileHandler getProjectFileHandler(final Project project, int version)
       throws ProjectManagerException {
 
     if (version == -1) {
-      version = projectLoader.getLatestProjectVersion(project);
+      version = this.projectLoader.getLatestProjectVersion(project);
     }
-    return storageManager.getProjectFile(project.getId(), version);
+    return this.storageManager.getProjectFile(project.getId(), version);
   }
 
-  public Map<String, ValidationReport> uploadProject(Project project,
-      File archive, String fileType, User uploader, Props additionalProps)
+  public Map<String, ValidationReport> uploadProject(final Project project,
+      final File archive, final String fileType, final User uploader, final Props additionalProps)
       throws ProjectManagerException {
     logger.info("Uploading files to " + project.getName());
 
@@ -433,7 +419,7 @@ public class ProjectManager {
         throw new ProjectManagerException("Unsupported archive type for file "
             + archive.getName());
       }
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ProjectManagerException("Error unzipping file.", e);
     }
 
@@ -442,7 +428,7 @@ public class ProjectManager {
     // value for the PROJECT_ARCHIVE_FILE_PATH key, it is necessary to
     // create a new instance of Props to make sure these different values
     // are isolated from each other.
-    Props prop = new Props(props);
+    final Props prop = new Props(this.props);
     prop.putAll(additionalProps);
     prop.put(ValidatorConfigs.PROJECT_ARCHIVE_FILE_PATH,
         archive.getAbsolutePath());
@@ -462,13 +448,13 @@ public class ProjectManager {
     // synchronization between uploads. Since we're already reloading the XML
     // config file and creating validator objects for each upload, this does
     // not add too much additional overhead.
-    ValidatorManager validatorManager = new XmlValidatorManager(prop);
+    final ValidatorManager validatorManager = new XmlValidatorManager(prop);
     logger.info("Validating project " + archive.getName()
         + " using the registered validators "
         + validatorManager.getValidatorsInfo().toString());
-    Map<String, ValidationReport> reports = validatorManager.validate(project, file);
+    final Map<String, ValidationReport> reports = validatorManager.validate(project, file);
     ValidationStatus status = ValidationStatus.PASS;
-    for (Entry<String, ValidationReport> report : reports.entrySet()) {
+    for (final Entry<String, ValidationReport> report : reports.entrySet()) {
       if (report.getValue().getStatus().compareTo(status) > 0) {
         status = report.getValue().getStatus();
       }
@@ -479,7 +465,7 @@ public class ProjectManager {
 
       try {
         FileUtils.deleteDirectory(file);
-      } catch (IOException e) {
+      } catch (final IOException e) {
         file.deleteOnExit();
         e.printStackTrace();
       }
@@ -487,74 +473,74 @@ public class ProjectManager {
       return reports;
     }
 
-    DirectoryFlowLoader loader =
+    final DirectoryFlowLoader loader =
         (DirectoryFlowLoader) validatorManager.getDefaultValidator();
-    Map<String, Props> jobProps = loader.getJobProps();
-    List<Props> propProps = loader.getProps();
+    final Map<String, Props> jobProps = loader.getJobProps();
+    final List<Props> propProps = loader.getProps();
 
     synchronized (project) {
-      int newVersion = projectLoader.getLatestProjectVersion(project) + 1;
-      Map<String, Flow> flows = loader.getFlowMap();
-      for (Flow flow : flows.values()) {
+      final int newVersion = this.projectLoader.getLatestProjectVersion(project) + 1;
+      final Map<String, Flow> flows = loader.getFlowMap();
+      for (final Flow flow : flows.values()) {
         flow.setProjectId(project.getId());
         flow.setVersion(newVersion);
       }
 
-      storageManager.uploadProject(project, newVersion, archive, uploader);
+      this.storageManager.uploadProject(project, newVersion, archive, uploader);
 
       logger.info("Uploading flow to db " + archive.getName());
-      projectLoader.uploadFlows(project, newVersion, flows.values());
+      this.projectLoader.uploadFlows(project, newVersion, flows.values());
       logger.info("Changing project versions " + archive.getName());
-      projectLoader.changeProjectVersion(project, newVersion,
+      this.projectLoader.changeProjectVersion(project, newVersion,
           uploader.getUserId());
       project.setFlows(flows);
       logger.info("Uploading Job properties");
-      projectLoader.uploadProjectProperties(project, new ArrayList<Props>(
+      this.projectLoader.uploadProjectProperties(project, new ArrayList<>(
           jobProps.values()));
       logger.info("Uploading Props properties");
-      projectLoader.uploadProjectProperties(project, propProps);
+      this.projectLoader.uploadProjectProperties(project, propProps);
     }
 
     logger.info("Uploaded project files. Cleaning up temp files.");
-    projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(),
+    this.projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(),
         "Uploaded project files zip " + archive.getName());
     try {
       FileUtils.deleteDirectory(file);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       file.deleteOnExit();
       e.printStackTrace();
     }
 
     logger.info("Cleaning up old install files older than "
-        + (project.getVersion() - projectVersionRetention));
-    projectLoader.cleanOlderProjectVersion(project.getId(),
-        project.getVersion() - projectVersionRetention);
+        + (project.getVersion() - this.projectVersionRetention));
+    this.projectLoader.cleanOlderProjectVersion(project.getId(),
+        project.getVersion() - this.projectVersionRetention);
 
     return reports;
   }
 
-  public void updateFlow(Project project, Flow flow)
+  public void updateFlow(final Project project, final Flow flow)
       throws ProjectManagerException {
-    projectLoader.updateFlow(project, flow.getVersion(), flow);
+    this.projectLoader.updateFlow(project, flow.getVersion(), flow);
   }
 
-  private File unzipFile(File archiveFile) throws IOException {
-    ZipFile zipfile = new ZipFile(archiveFile);
-    File unzipped = Utils.createTempDir(tempDir);
+  private File unzipFile(final File archiveFile) throws IOException {
+    final ZipFile zipfile = new ZipFile(archiveFile);
+    final File unzipped = Utils.createTempDir(this.tempDir);
     Utils.unzip(zipfile, unzipped);
     zipfile.close();
 
     return unzipped;
   }
 
-  public void postProjectEvent(Project project, EventType type, String user,
-      String message) {
-    projectLoader.postEvent(project, type, user, message);
+  public void postProjectEvent(final Project project, final EventType type, final String user,
+      final String message) {
+    this.projectLoader.postEvent(project, type, user, message);
   }
 
   public boolean loadProjectWhiteList() {
-    if (props.containsKey(ProjectWhitelist.XML_FILE_PARAM)) {
-      ProjectWhitelist.load(props);
+    if (this.props.containsKey(ProjectWhitelist.XML_FILE_PARAM)) {
+      ProjectWhitelist.load(this.props);
       return true;
     }
     return false;
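
As background for the validation loop near the top of this file's hunks: the overall status of an upload is simply the most severe ValidationStatus among all validator reports, which works because the enum declares PASS before WARN before ERROR. A minimal, self-contained sketch of that aggregation (the local Status enum and the sample values are illustrative, not part of this change):

    import java.util.Arrays;
    import java.util.List;

    public class StatusAggregationSketch {

      // Mirrors the ordering of azkaban.project.validator.ValidationStatus: PASS < WARN < ERROR.
      enum Status { PASS, WARN, ERROR }

      public static void main(final String[] args) {
        final List<Status> reportStatuses = Arrays.asList(Status.PASS, Status.WARN, Status.PASS);

        Status overall = Status.PASS;
        for (final Status s : reportStatuses) {
          // compareTo() > 0 means s is declared later in the enum, i.e. more severe.
          if (s.compareTo(overall) > 0) {
            overall = s;
          }
        }
        System.out.println("Overall upload status: " + overall); // prints WARN
      }
    }
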
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectManagerException.java b/azkaban-common/src/main/java/azkaban/project/ProjectManagerException.java
index fe1b50a..1d87090 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectManagerException.java
@@ -20,13 +20,14 @@ import azkaban.spi.AzkabanException;
 
 
 public class ProjectManagerException extends AzkabanException {
+
   private static final long serialVersionUID = 1L;
 
-  public ProjectManagerException(String message) {
+  public ProjectManagerException(final String message) {
     super(message);
   }
 
-  public ProjectManagerException(String message, Throwable cause) {
+  public ProjectManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectSpec.java b/azkaban-common/src/main/java/azkaban/project/ProjectSpec.java
index 96789a6..037d633 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectSpec.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectSpec.java
@@ -23,44 +23,46 @@ import java.util.Map;
 
 
 public class ProjectSpec implements Serializable {
+
   private String version;
   private PreExecutionSpec preExec;
 
   public String getVersion() {
-    return version;
+    return this.version;
   }
 
-  public void setVersion(String version) {
+  public void setVersion(final String version) {
     this.version = version;
   }
 
   public PreExecutionSpec getPreExec() {
-    return preExec;
+    return this.preExec;
   }
 
-  public void setPreExec(PreExecutionSpec preExec) {
+  public void setPreExec(final PreExecutionSpec preExec) {
     this.preExec = preExec;
   }
 
   @Override
   public String toString() {
-    return "ProjectSpec{" + "version='" + version + '\'' + ", preExec=" + preExec + '}';
+    return "ProjectSpec{" + "version='" + this.version + '\'' + ", preExec=" + this.preExec + '}';
   }
 
   public static class PreExecutionSpec implements Serializable {
+
     private Map<String, URI> fetch;
 
     public Map<String, URI> getFetch() {
-      return fetch;
+      return this.fetch;
     }
 
-    public void setFetch(Map<String, URI> fetch) {
+    public void setFetch(final Map<String, URI> fetch) {
       this.fetch = fetch;
     }
 
     @Override
     public String toString() {
-      return "PreExecutionSpec{" + "fetch=" + fetch + '}';
+      return "PreExecutionSpec{" + "fetch=" + this.fetch + '}';
     }
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/project/ProjectWhitelist.java b/azkaban-common/src/main/java/azkaban/project/ProjectWhitelist.java
index 039ab3a..f8c82e1 100644
--- a/azkaban-common/src/main/java/azkaban/project/ProjectWhitelist.java
+++ b/azkaban-common/src/main/java/azkaban/project/ProjectWhitelist.java
@@ -1,5 +1,6 @@
 package azkaban.project;
 
+import azkaban.utils.Props;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
@@ -7,65 +8,57 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.atomic.AtomicReference;
-
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
-
 import org.w3c.dom.Document;
 import org.w3c.dom.NamedNodeMap;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
-import azkaban.utils.Props;
-
 /**
  * @author wkang
  *
- * This class manages project whitelist defined in xml config file.
- * An single xml config file contains different types of whitelisted
- * projects. For additional type of whitelist, modify WhitelistType enum.
- *
- * The xml config file should in the following format. Please note
- * the tag <MemoryCheck> is same as the defined enum MemoryCheck
+ *         This class manages the project whitelist defined in an xml config file. A single xml
+ *         config file contains different types of whitelisted projects. To add a new type of
+ *         whitelist, extend the WhitelistType enum.
  *
- * <ProjectWhitelist>
- *  <MemoryCheck>
- *      <project projectname="project1" />
- *      <project projectname="project2" />
- *  </MemoryCheck>
- * <ProjectWhitelist>
+ *         The xml config file should be in the following format. Please note that the tag
+ *         <MemoryCheck> matches the enum constant MemoryCheck:
  *
+ *         <ProjectWhitelist>
+ *           <MemoryCheck>
+ *             <project projectid="1" />
+ *             <project projectid="2" />
+ *           </MemoryCheck>
+ *         </ProjectWhitelist>
  */
 public class ProjectWhitelist {
+
   public static final String XML_FILE_PARAM = "project.whitelist.xml.file";
   private static final String PROJECT_WHITELIST_TAG = "ProjectWhitelist";
   private static final String PROJECT_TAG = "project";
   private static final String PROJECTID_ATTR = "projectid";
 
-  private static AtomicReference<Map<WhitelistType, Set<Integer>>> projectsWhitelisted =
-          new AtomicReference<Map<WhitelistType, Set<Integer>>>();
+  private static final AtomicReference<Map<WhitelistType, Set<Integer>>> projectsWhitelisted =
+      new AtomicReference<>();
 
-  static void load(Props props) {
-    String xmlFile = props.getString(XML_FILE_PARAM);
+  static void load(final Props props) {
+    final String xmlFile = props.getString(XML_FILE_PARAM);
     parseXMLFile(xmlFile);
   }
 
-  private static void parseXMLFile(String xmlFile) {
-    File file = new File(xmlFile);
+  private static void parseXMLFile(final String xmlFile) {
+    final File file = new File(xmlFile);
     if (!file.exists()) {
       throw new IllegalArgumentException("Project whitelist xml file " + xmlFile
           + " doesn't exist.");
     }
 
     // Creating the document builder to parse xml.
-    DocumentBuilderFactory docBuilderFactory =
+    final DocumentBuilderFactory docBuilderFactory =
         DocumentBuilderFactory.newInstance();
     DocumentBuilder builder = null;
     try {
       builder = docBuilderFactory.newDocumentBuilder();
-    } catch (ParserConfigurationException e) {
+    } catch (final ParserConfigurationException e) {
       throw new IllegalArgumentException(
           "Exception while parsing project whitelist xml. Document builder not created.", e);
     }
@@ -73,32 +66,32 @@ public class ProjectWhitelist {
     Document doc = null;
     try {
       doc = builder.parse(file);
-    } catch (SAXException e) {
+    } catch (final SAXException e) {
       throw new IllegalArgumentException("Exception while parsing " + xmlFile
           + ". Invalid XML.", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new IllegalArgumentException("Exception while parsing " + xmlFile
           + ". Error reading file.", e);
     }
 
-    Map<WhitelistType, Set<Integer>> projsWhitelisted = new HashMap<WhitelistType, Set<Integer>>();
-    NodeList tagList = doc.getChildNodes();
+    final Map<WhitelistType, Set<Integer>> projsWhitelisted = new HashMap<>();
+    final NodeList tagList = doc.getChildNodes();
     if (!tagList.item(0).getNodeName().equals(PROJECT_WHITELIST_TAG)) {
-      throw new RuntimeException("Cannot find tag '" +  PROJECT_WHITELIST_TAG + "' in " + xmlFile);
+      throw new RuntimeException("Cannot find tag '" + PROJECT_WHITELIST_TAG + "' in " + xmlFile);
     }
 
-    NodeList whitelist = tagList.item(0).getChildNodes();
+    final NodeList whitelist = tagList.item(0).getChildNodes();
     for (int n = 0; n < whitelist.getLength(); ++n) {
       if (whitelist.item(n).getNodeType() != Node.ELEMENT_NODE) {
         continue;
       }
 
-      String whitelistType = whitelist.item(n).getNodeName();
-      Set<Integer> projs = new HashSet<Integer>();
+      final String whitelistType = whitelist.item(n).getNodeName();
+      final Set<Integer> projs = new HashSet<>();
 
-      NodeList projectsList = whitelist.item(n).getChildNodes();
+      final NodeList projectsList = whitelist.item(n).getChildNodes();
       for (int i = 0; i < projectsList.getLength(); ++i) {
-        Node node = projectsList.item(i);
+        final Node node = projectsList.item(i);
         if (node.getNodeType() == Node.ELEMENT_NODE) {
           if (node.getNodeName().equals(PROJECT_TAG)) {
             parseProjectTag(node, projs);
@@ -110,22 +103,22 @@ public class ProjectWhitelist {
     projectsWhitelisted.set(projsWhitelisted);
   }
 
-  private static void parseProjectTag(Node node, Set<Integer> projects) {
-    NamedNodeMap projectAttrMap = node.getAttributes();
-    Node projectIdAttr = projectAttrMap.getNamedItem(PROJECTID_ATTR);
+  private static void parseProjectTag(final Node node, final Set<Integer> projects) {
+    final NamedNodeMap projectAttrMap = node.getAttributes();
+    final Node projectIdAttr = projectAttrMap.getNamedItem(PROJECTID_ATTR);
     if (projectIdAttr == null) {
       throw new RuntimeException("Error loading project. The '" + PROJECTID_ATTR
-              + "' attribute doesn't exist");
+          + "' attribute doesn't exist");
     }
 
-    String projectId = projectIdAttr.getNodeValue();
+    final String projectId = projectIdAttr.getNodeValue();
     projects.add(Integer.parseInt(projectId));
   }
 
-  public static boolean isProjectWhitelisted(int project, WhitelistType whitelistType) {
-    Map<WhitelistType, Set<Integer>> projsWhitelisted = projectsWhitelisted.get();
+  public static boolean isProjectWhitelisted(final int project, final WhitelistType whitelistType) {
+    final Map<WhitelistType, Set<Integer>> projsWhitelisted = projectsWhitelisted.get();
     if (projsWhitelisted != null) {
-      Set<Integer> projs = projsWhitelisted.get(whitelistType);
+      final Set<Integer> projs = projsWhitelisted.get(whitelistType);
       if (projs != null) {
         return projs.contains(project);
       }
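
To make the whitelist flow above concrete, here is a hypothetical usage sketch. It assumes the class sits in the azkaban.project package (load(Props) is package-private), that Props offers a no-arg constructor and put(String, String), and that WhitelistType is the nested enum whose MemoryCheck constant corresponds to the <MemoryCheck> tag; the file path is a placeholder:

    package azkaban.project;

    import azkaban.utils.Props;

    public class WhitelistSketch {

      public static void main(final String[] args) {
        // The referenced file is a placeholder and is expected to look like the
        // <ProjectWhitelist>/<MemoryCheck> example in the class javadoc above.
        final Props props = new Props();
        props.put(ProjectWhitelist.XML_FILE_PARAM, "/path/to/project-whitelist.xml");
        ProjectWhitelist.load(props);

        final boolean whitelisted = ProjectWhitelist.isProjectWhitelisted(
            1, ProjectWhitelist.WhitelistType.MemoryCheck);
        System.out.println("Project 1 whitelisted for MemoryCheck: " + whitelisted);
      }
    }
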
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ProjectValidator.java b/azkaban-common/src/main/java/azkaban/project/validator/ProjectValidator.java
index 3c4b0e2..6c3e6d4 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ProjectValidator.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ProjectValidator.java
@@ -1,9 +1,8 @@
 package azkaban.project.validator;
 
-import java.io.File;
-
 import azkaban.project.Project;
 import azkaban.utils.Props;
+import java.io.File;
 
 /**
  * Interface to be implemented by plugins which are to be registered with Azkaban
@@ -13,16 +12,11 @@ public interface ProjectValidator {
 
   /**
    * Initialize the validator using the given properties.
-   *
-   * @param configuration
-   * @return
    */
   boolean initialize(Props configuration);
 
   /**
    * Return a user friendly name of the validator.
-   *
-   * @return
    */
   String getValidatorName();
 
@@ -30,9 +24,6 @@ public interface ProjectValidator {
    * Validate the project inside the given directory. The validator, using its own
    * validation logic, will generate a {@link ValidationReport} representing the result of
    * the validation.
-   *
-   * @param projectDir
-   * @return
    */
   ValidationReport validateProject(Project project, File projectDir);
 }
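
Since ProjectValidator is the plugin interface, the following is a hypothetical validator sketch (the class name, message text, and the emptiness check are invented for illustration). A real plugin loaded through the XML config also needs the public Logger constructor that XmlValidatorManager looks up reflectively, as shown later in this diff:

    import azkaban.project.Project;
    import azkaban.project.validator.ProjectValidator;
    import azkaban.project.validator.ValidationReport;
    import azkaban.utils.Props;
    import java.io.File;
    import java.util.Collections;
    import org.apache.log4j.Logger;

    public class EmptyProjectValidator implements ProjectValidator {

      private final Logger logger;

      // XmlValidatorManager instantiates validators via a constructor taking a Logger.
      public EmptyProjectValidator(final Logger logger) {
        this.logger = logger;
      }

      @Override
      public boolean initialize(final Props configuration) {
        return true; // nothing to configure in this sketch
      }

      @Override
      public String getValidatorName() {
        return "Empty Project";
      }

      @Override
      public ValidationReport validateProject(final Project project, final File projectDir) {
        final ValidationReport report = new ValidationReport();
        final String[] entries = projectDir.list();
        if (entries == null || entries.length == 0) {
          // An error-level message bumps the report status to ERROR.
          report.addErrorMsgs(Collections.singleton("Project archive contains no files."));
        }
        return report;
      }
    }
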
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidationReport.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidationReport.java
index e586ca1..321976b 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidationReport.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidationReport.java
@@ -19,127 +19,105 @@ public class ValidationReport {
   protected Set<String> _errorMsgs;
 
   public ValidationReport() {
-    _status = ValidationStatus.PASS;
-    _infoMsgs = new HashSet<String>();
-    _warningMsgs = new HashSet<String>();
-    _errorMsgs = new HashSet<String>();
+    this._status = ValidationStatus.PASS;
+    this._infoMsgs = new HashSet<>();
+    this._warningMsgs = new HashSet<>();
+    this._errorMsgs = new HashSet<>();
+  }
+
+  /**
+   * Return the severity level this information message is associated with.
+   */
+  public static ValidationStatus getInfoMsgLevel(final String msg) {
+    if (msg.startsWith("ERROR")) {
+      return ValidationStatus.ERROR;
+    }
+    if (msg.startsWith("WARN")) {
+      return ValidationStatus.WARN;
+    }
+    return ValidationStatus.PASS;
+  }
+
+  /**
+   * Get the raw information message.
+   */
+  public static String getInfoMsg(final String msg) {
+    if (msg.startsWith("ERROR")) {
+      return msg.replaceFirst("ERROR", "");
+    }
+    if (msg.startsWith("WARN")) {
+      return msg.replaceFirst("WARN", "");
+    }
+    return msg;
   }
 
   /**
    * Add an information message associated with warning messages
-   *
-   * @param msgs
    */
-  public void addWarnLevelInfoMsg(String msg) {
+  public void addWarnLevelInfoMsg(final String msg) {
     if (msg != null) {
-      _infoMsgs.add("WARN" + msg);
+      this._infoMsgs.add("WARN" + msg);
     }
   }
 
   /**
    * Add an information message associated with error messages
-   *
-   * @param msgs
    */
-  public void addErrorLevelInfoMsg(String msg) {
+  public void addErrorLevelInfoMsg(final String msg) {
     if (msg != null) {
-      _infoMsgs.add("ERROR" + msg);
+      this._infoMsgs.add("ERROR" + msg);
     }
   }
 
   /**
    * Add a message with status level being {@link ValidationStatus#WARN}
-   *
-   * @param msgs
    */
-  public void addWarningMsgs(Set<String> msgs) {
+  public void addWarningMsgs(final Set<String> msgs) {
     if (msgs != null) {
-      _warningMsgs.addAll(msgs);
-      if (!msgs.isEmpty() && _errorMsgs.isEmpty()) {
-        _status = ValidationStatus.WARN;
+      this._warningMsgs.addAll(msgs);
+      if (!msgs.isEmpty() && this._errorMsgs.isEmpty()) {
+        this._status = ValidationStatus.WARN;
       }
     }
   }
 
   /**
    * Add a message with status level being {@link ValidationStatus#ERROR}
-   *
-   * @param msgs
    */
-  public void addErrorMsgs(Set<String> msgs) {
+  public void addErrorMsgs(final Set<String> msgs) {
     if (msgs != null) {
-      _errorMsgs.addAll(msgs);
+      this._errorMsgs.addAll(msgs);
       if (!msgs.isEmpty()) {
-        _status = ValidationStatus.ERROR;
+        this._status = ValidationStatus.ERROR;
       }
     }
   }
 
   /**
    * Retrieve the status of the report.
-   *
-   * @return
    */
   public ValidationStatus getStatus() {
-    return _status;
+    return this._status;
   }
 
   /**
    * Retrieve the list of information messages.
-   *
-   * @return
    */
   public Set<String> getInfoMsgs() {
-    return _infoMsgs;
+    return this._infoMsgs;
   }
 
   /**
    * Retrieve the messages associated with status level {@link ValidationStatus#WARN}
-   *
-   * @return
    */
   public Set<String> getWarningMsgs() {
-    return _warningMsgs;
+    return this._warningMsgs;
   }
 
   /**
    * Retrieve the messages associated with status level {@link ValidationStatus#ERROR}
-   *
-   * @return
    */
   public Set<String> getErrorMsgs() {
-    return _errorMsgs;
-  }
-
-  /**
-   * Return the severity level this information message is associated with.
-   *
-   * @param msg
-   * @return
-   */
-  public static ValidationStatus getInfoMsgLevel(String msg) {
-    if (msg.startsWith("ERROR")) {
-      return ValidationStatus.ERROR;
-    }
-    if (msg.startsWith("WARN")) {
-      return ValidationStatus.WARN;
-    }
-    return ValidationStatus.PASS;
-  }
-
-  /**
-   * Get the raw information message.
-   *
-   * @param msg
-   * @return
-   */
-  public static String getInfoMsg(String msg) {
-    if (msg.startsWith("ERROR")) {
-      return msg.replaceFirst("ERROR", "");
-    }
-    if (msg.startsWith("WARN")) {
-      return msg.replaceFirst("WARN", "");
-    }
-    return msg;
+    return this._errorMsgs;
   }
 }
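
The info-message helpers above encode severity by prefixing the raw text with "WARN" or "ERROR"; a small round-trip sketch with an arbitrary example message:

    import azkaban.project.validator.ValidationReport;
    import azkaban.project.validator.ValidationStatus;

    public class InfoMsgSketch {

      public static void main(final String[] args) {
        final ValidationReport report = new ValidationReport();
        // Stored internally as "WARNDeprecated job type used.".
        report.addWarnLevelInfoMsg("Deprecated job type used.");

        for (final String stored : report.getInfoMsgs()) {
          // The static helpers recover the severity and the raw text from the prefix.
          final ValidationStatus level = ValidationReport.getInfoMsgLevel(stored); // WARN
          final String raw = ValidationReport.getInfoMsg(stored);                  // "Deprecated job type used."
          System.out.println(level + ": " + raw);
        }
      }
    }
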
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidationStatus.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidationStatus.java
index 4b1f0f4..6aaed24 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidationStatus.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidationStatus.java
@@ -12,11 +12,11 @@ public enum ValidationStatus {
   private final String _status;
 
   private ValidationStatus(final String status) {
-    _status = status;
+    this._status = status;
   }
 
   @Override
   public String toString() {
-    return _status;
+    return this._status;
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorClassLoader.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorClassLoader.java
index 8bec05a..2f5ea85 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorClassLoader.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorClassLoader.java
@@ -19,19 +19,19 @@ import sun.net.www.protocol.jar.JarURLConnection;
  */
 public class ValidatorClassLoader extends URLClassLoader {
 
-  protected HashSet<String> setJarFileNames2Close = new HashSet<String>();
+  protected HashSet<String> setJarFileNames2Close = new HashSet<>();
 
-  public ValidatorClassLoader(URL[] urls, ClassLoader parent) {
+  public ValidatorClassLoader(final URL[] urls, final ClassLoader parent) {
     super(urls, parent);
   }
 
-  public ValidatorClassLoader(URL[] urls) {
+  public ValidatorClassLoader(final URL[] urls) {
     super(urls);
   }
 
   @Override
   public void close() throws ValidatorManagerException {
-    setJarFileNames2Close.clear();
+    this.setJarFileNames2Close.clear();
     closeClassLoader(this);
     finalizeNativeLibs(this);
     cleanupJarFileFactory();
@@ -40,14 +40,13 @@ public class ValidatorClassLoader extends URLClassLoader {
   /**
    * cleanup jar file factory cache
    */
-  @SuppressWarnings({ "nls", "rawtypes" })
   public boolean cleanupJarFileFactory() throws ValidatorManagerException {
     boolean res = false;
     final Class classJarURLConnection = JarURLConnection.class;
     Field f;
     try {
       f = classJarURLConnection.getDeclaredField("factory");
-    } catch (NoSuchFieldException e) {
+    } catch (final NoSuchFieldException e) {
       throw new ValidatorManagerException(e);
     }
     if (f == null) {
@@ -57,13 +56,13 @@ public class ValidatorClassLoader extends URLClassLoader {
     Object obj;
     try {
       obj = f.get(null);
-    } catch (IllegalAccessException e) {
+    } catch (final IllegalAccessException e) {
       throw new ValidatorManagerException(e);
     }
     if (obj == null) {
       return false;
     }
-    Class classJarFileFactory = obj.getClass();
+    final Class classJarFileFactory = obj.getClass();
 
     HashMap fileCache = null;
     try {
@@ -88,18 +87,18 @@ public class ValidatorClassLoader extends URLClassLoader {
       throw new ValidatorManagerException(e);
     }
     if (urlCache != null) {
-      HashMap urlCacheTmp = (HashMap) urlCache.clone();
-      Iterator it = urlCacheTmp.keySet().iterator();
+      final HashMap urlCacheTmp = (HashMap) urlCache.clone();
+      final Iterator it = urlCacheTmp.keySet().iterator();
       while (it.hasNext()) {
         obj = it.next();
         if (!(obj instanceof JarFile)) {
           continue;
         }
-        JarFile jarFile = (JarFile) obj;
-        if (setJarFileNames2Close.contains(jarFile.getName())) {
+        final JarFile jarFile = (JarFile) obj;
+        if (this.setJarFileNames2Close.contains(jarFile.getName())) {
           try {
             jarFile.close();
-          } catch (IOException e) {
+          } catch (final IOException e) {
             throw new ValidatorManagerException(e);
           }
           if (fileCache != null) {
@@ -110,19 +109,19 @@ public class ValidatorClassLoader extends URLClassLoader {
       }
       res = true;
     } else if (fileCache != null) {
-      HashMap fileCacheTmp = (HashMap) fileCache.clone();
-      Iterator it = fileCacheTmp.keySet().iterator();
+      final HashMap fileCacheTmp = (HashMap) fileCache.clone();
+      final Iterator it = fileCacheTmp.keySet().iterator();
       while (it.hasNext()) {
-        Object key = it.next();
+        final Object key = it.next();
         obj = fileCache.get(key);
         if (!(obj instanceof JarFile)) {
           continue;
         }
-        JarFile jarFile = (JarFile) obj;
-        if (setJarFileNames2Close.contains(jarFile.getName())) {
+        final JarFile jarFile = (JarFile) obj;
+        if (this.setJarFileNames2Close.contains(jarFile.getName())) {
           try {
             jarFile.close();
-          } catch (IOException e) {
+          } catch (final IOException e) {
             throw new ValidatorManagerException(e);
           }
           fileCache.remove(key);
@@ -130,26 +129,23 @@ public class ValidatorClassLoader extends URLClassLoader {
       }
       res = true;
     }
-    setJarFileNames2Close.clear();
+    this.setJarFileNames2Close.clear();
     return res;
   }
 
   /**
    * close jar files of cl
-   * @param cl
-   * @return
    */
-  @SuppressWarnings({ "nls", "rawtypes" })
-  public boolean closeClassLoader(ClassLoader cl) throws ValidatorManagerException {
+  public boolean closeClassLoader(final ClassLoader cl) throws ValidatorManagerException {
     boolean res = false;
     if (cl == null) {
       return res;
     }
-    Class classURLClassLoader = URLClassLoader.class;
+    final Class classURLClassLoader = URLClassLoader.class;
     Field f = null;
     try {
       f = classURLClassLoader.getDeclaredField("ucp");
-    } catch (NoSuchFieldException e) {
+    } catch (final NoSuchFieldException e) {
       throw new ValidatorManagerException(e);
     }
     if (f != null) {
@@ -157,7 +153,7 @@ public class ValidatorClassLoader extends URLClassLoader {
       Object obj = null;
       try {
         obj = f.get(cl);
-      } catch (IllegalAccessException e) {
+      } catch (final IllegalAccessException e) {
         throw new ValidatorManagerException(e);
       }
       if (obj != null) {
@@ -165,7 +161,7 @@ public class ValidatorClassLoader extends URLClassLoader {
         f = null;
         try {
           f = ucp.getClass().getDeclaredField("loaders");
-        } catch (NoSuchFieldException e) {
+        } catch (final NoSuchFieldException e) {
           throw new ValidatorManagerException(e);
         }
         if (f != null) {
@@ -174,7 +170,7 @@ public class ValidatorClassLoader extends URLClassLoader {
           try {
             loaders = (ArrayList) f.get(ucp);
             res = true;
-          } catch (IllegalAccessException e) {
+          } catch (final IllegalAccessException e) {
             throw new ValidatorManagerException(e);
           }
           for (int i = 0; loaders != null && i < loaders.size(); i++) {
@@ -182,22 +178,22 @@ public class ValidatorClassLoader extends URLClassLoader {
             f = null;
             try {
               f = obj.getClass().getDeclaredField("jar");
-            } catch (NoSuchFieldException e) {
+            } catch (final NoSuchFieldException e) {
               throw new ValidatorManagerException(e);
             }
             if (f != null) {
               f.setAccessible(true);
               try {
                 obj = f.get(obj);
-              } catch (IllegalAccessException e) {
+              } catch (final IllegalAccessException e) {
                 throw new ValidatorManagerException(e);
               }
               if (obj instanceof JarFile) {
                 final JarFile jarFile = (JarFile) obj;
-                setJarFileNames2Close.add(jarFile.getName());
+                this.setJarFileNames2Close.add(jarFile.getName());
                 try {
                   jarFile.close();
-                } catch (IOException e) {
+                } catch (final IOException e) {
                   throw new ValidatorManagerException(e);
                 }
               }
@@ -211,17 +207,14 @@ public class ValidatorClassLoader extends URLClassLoader {
 
   /**
    * finalize native libraries
-   * @param cl
-   * @return
    */
-  @SuppressWarnings({ "nls", "rawtypes" })
-  public boolean finalizeNativeLibs(ClassLoader cl) throws ValidatorManagerException {
+  public boolean finalizeNativeLibs(final ClassLoader cl) throws ValidatorManagerException {
     boolean res = false;
-    Class classClassLoader = ClassLoader.class;
+    final Class classClassLoader = ClassLoader.class;
     java.lang.reflect.Field nativeLibraries = null;
     try {
       nativeLibraries = classClassLoader.getDeclaredField("nativeLibraries");
-    } catch (NoSuchFieldException e) {
+    } catch (final NoSuchFieldException e) {
       throw new ValidatorManagerException(e);
     }
     if (nativeLibraries == null) {
@@ -231,28 +224,28 @@ public class ValidatorClassLoader extends URLClassLoader {
     Object obj = null;
     try {
       obj = nativeLibraries.get(cl);
-    } catch (IllegalAccessException e) {
+    } catch (final IllegalAccessException e) {
       throw new ValidatorManagerException(e);
     }
     if (!(obj instanceof Vector)) {
       return res;
     }
     res = true;
-    Vector java_lang_ClassLoader_NativeLibrary = (Vector) obj;
-    for (Object lib : java_lang_ClassLoader_NativeLibrary) {
+    final Vector java_lang_ClassLoader_NativeLibrary = (Vector) obj;
+    for (final Object lib : java_lang_ClassLoader_NativeLibrary) {
       java.lang.reflect.Method finalize = null;
       try {
         finalize = lib.getClass().getDeclaredMethod("finalize", new Class[0]);
-      } catch (NoSuchMethodException e) {
+      } catch (final NoSuchMethodException e) {
         throw new ValidatorManagerException(e);
       }
       if (finalize != null) {
         finalize.setAccessible(true);
         try {
           finalize.invoke(lib, new Object[0]);
-        } catch (IllegalAccessException e) {
+        } catch (final IllegalAccessException e) {
           throw new ValidatorManagerException(e);
-        } catch (InvocationTargetException e) {
+        } catch (final InvocationTargetException e) {
           throw new ValidatorManagerException(e);
         }
       }
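
ValidatorClassLoader exists because URLClassLoader had no close() before Java 7, so validator JARs stayed locked by cached JarFile handles; a usage sketch in which the JAR path and class name are placeholders:

    import azkaban.project.validator.ValidatorClassLoader;
    import java.io.File;
    import java.net.URL;

    public class LoaderSketch {

      public static void main(final String[] args) throws Exception {
        // Hypothetical validator JAR dropped into the validators/ plugin directory.
        final URL jarUrl = new File("validators/my-validator.jar").toURI().toURL();
        final ValidatorClassLoader loader = new ValidatorClassLoader(new URL[]{jarUrl});

        final Class<?> clazz = loader.loadClass("com.example.MyValidator");
        System.out.println("Loaded " + clazz.getName());

        // Releases the cached JarFile handles so the plugin JAR can be replaced on disk.
        loader.close();
      }
    }
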
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorConfigs.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorConfigs.java
index 56beb00..267b919 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorConfigs.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorConfigs.java
@@ -2,35 +2,48 @@ package azkaban.project.validator;
 
 public class ValidatorConfigs {
 
-  private ValidatorConfigs() {} // Prevents instantiation
-
-  /** Key for the config param specifying the directory containing validator JAR files **/
+  /**
+   * Key for the config param specifying the directory containing validator JAR files
+   **/
   public static final String VALIDATOR_PLUGIN_DIR = "project.validators.dir";
-
-  /** Default validator directory **/
+  /**
+   * Default validator directory
+   **/
   public static final String DEFAULT_VALIDATOR_DIR = "validators";
-
-  /** Key for the config param specifying the location of validator xml configuration file, no default value **/
+  /**
+   * Key for the config param specifying the location of validator xml configuration file, no
+   * default value
+   **/
   public static final String XML_FILE_PARAM = "project.validators.xml.file";
-
-  /** Key for the config param indicating whether the user choose to turn on the auto-fix feature **/
+  /**
+   * Key for the config param indicating whether the user chooses to turn on the auto-fix feature
+   **/
   public static final String CUSTOM_AUTO_FIX_FLAG_PARAM = "project.validators.fix.flag";
-
-  /** Default custom auto fix flag. Turn auto-fix feature on by default. **/
+  /**
+   * Default custom auto fix flag. Turn auto-fix feature on by default.
+   **/
   public static final Boolean DEFAULT_CUSTOM_AUTO_FIX_FLAG = true;
-
-  /** Key for the config param indicating whether to show auto-fix related UI to the user **/
+  /**
+   * Key for the config param indicating whether to show auto-fix related UI to the user
+   **/
   public static final String VALIDATOR_AUTO_FIX_PROMPT_FLAG_PARAM = "project.validators.fix.prompt";
-
-  /** Do not show auto-fix related UI by default **/
+  /**
+   * Do not show auto-fix related UI by default
+   **/
   public static final Boolean DEFAULT_VALIDATOR_AUTO_FIX_PROMPT_FLAG = false;
-
-  /** Key for the config param specifying the label to be displayed with auto-fix UI **/
+  /**
+   * Key for the config param specifying the label to be displayed with auto-fix UI
+   **/
   public static final String VALIDATOR_AUTO_FIX_PROMPT_LABEL_PARAM = "project.validators.fix.label";
-
-  /** Key for the config param specifying the link address with detailed information about auto-fix **/
+  /**
+   * Key for the config param specifying the link address with detailed information about auto-fix
+   **/
   public static final String VALIDATOR_AUTO_FIX_PROMPT_LINK_PARAM = "project.validators.fix.link";
-
-  /** Key for the confi param indicating path to the project archive file **/
+  /**
+   * Key for the config param indicating the path to the project archive file
+   **/
   public static final String PROJECT_ARCHIVE_FILE_PATH = "project.archive.file.path";
+
+  private ValidatorConfigs() {
+  } // Prevents instantiation
 }
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManager.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManager.java
index e759ad2..f7d89c7 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManager.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManager.java
@@ -1,26 +1,22 @@
 package azkaban.project.validator;
 
+import azkaban.project.Project;
+import azkaban.utils.Props;
 import java.io.File;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.log4j.Logger;
 
-import azkaban.project.Project;
-import azkaban.utils.Props;
-
 /**
  * ValidatorManager is responsible for loading the list of validators specified in the
  * Azkaban validator configuration file. Once these validators are loaded, the ValidatorManager
  * will use the registered validators to verify each uploaded project before persisting it.
  */
 public interface ValidatorManager {
+
   /**
    * Load the validators using the given properties. Each validator is also given the specified
    * logger to record any necessary message in the Azkaban log file.
-   *
-   * @param props
-   * @param logger
    */
   void loadValidators(Props props, Logger logger);
 
@@ -28,9 +24,6 @@ public interface ValidatorManager {
    * Validate the given project using the registered list of validators. This method returns a
    * map of {@link ValidationReport} with the key being the validator's name and the value being
    * the {@link ValidationReport} generated by that validator.
-   *
-   * @param projectDir
-   * @return
    */
   Map<String, ValidationReport> validate(Project project, File projectDir);
 
@@ -38,15 +31,11 @@ public interface ValidatorManager {
    * The ValidatorManager should have a default validator which checks for the most essential
    * components of a project. The ValidatorManager should always load the default validator.
    * This method returns the default validator of this ValidatorManager.
-   *
-   * @return
    */
   ProjectValidator getDefaultValidator();
 
   /**
    * Returns a list of String containing the name of each registered validators.
-   *
-   * @return
    */
   List<String> getValidatorsInfo();
 }
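
A hypothetical end-to-end use of this interface through its XML implementation (the Project constructor arguments and directory path are placeholders, and the sketch assumes Project has an (int, String) constructor; note that XmlValidatorManager already invokes loadValidators in its own constructor):

    import azkaban.project.Project;
    import azkaban.project.validator.ValidationReport;
    import azkaban.project.validator.ValidatorManager;
    import azkaban.project.validator.XmlValidatorManager;
    import azkaban.utils.Props;
    import java.io.File;
    import java.util.Map;
    import java.util.Map.Entry;

    public class ValidatorManagerSketch {

      public static void main(final String[] args) {
        // Without ValidatorConfigs.XML_FILE_PARAM set, only the default DirectoryFlowLoader runs.
        final Props props = new Props();
        final ValidatorManager manager = new XmlValidatorManager(props);

        final Project project = new Project(1, "sketch-project"); // assumed constructor
        final Map<String, ValidationReport> reports =
            manager.validate(project, new File("/tmp/unzipped-project"));
        for (final Entry<String, ValidationReport> report : reports.entrySet()) {
          System.out.println(report.getKey() + " -> " + report.getValue().getStatus());
        }
      }
    }
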
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManagerException.java b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManagerException.java
index 8b04c2f..a8744b5 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/ValidatorManagerException.java
@@ -1,17 +1,18 @@
 package azkaban.project.validator;
 
 public class ValidatorManagerException extends RuntimeException {
+
   private static final long serialVersionUID = 1L;
 
-  public ValidatorManagerException(String message) {
+  public ValidatorManagerException(final String message) {
     super(message);
   }
 
-  public ValidatorManagerException(Throwable cause) {
+  public ValidatorManagerException(final Throwable cause) {
     super(cause);
   }
 
-  public ValidatorManagerException(String message, Throwable cause) {
+  public ValidatorManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/project/validator/XmlValidatorManager.java b/azkaban-common/src/main/java/azkaban/project/validator/XmlValidatorManager.java
index cc6d005..bf625fe 100644
--- a/azkaban-common/src/main/java/azkaban/project/validator/XmlValidatorManager.java
+++ b/azkaban-common/src/main/java/azkaban/project/validator/XmlValidatorManager.java
@@ -1,5 +1,8 @@
 package azkaban.project.validator;
 
+import azkaban.project.DirectoryFlowLoader;
+import azkaban.project.Project;
+import azkaban.utils.Props;
 import java.io.File;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
@@ -11,11 +14,9 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
-
 import org.apache.log4j.Logger;
 import org.w3c.dom.Document;
 import org.w3c.dom.NamedNodeMap;
@@ -23,50 +24,43 @@ import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 import org.xml.sax.SAXException;
 
-import azkaban.project.Project;
-import azkaban.project.DirectoryFlowLoader;
-import azkaban.utils.Props;
-
 /**
  * Xml implementation of the ValidatorManager. Looks for the property
  * project.validators.xml.file in the azkaban properties.
  *
  * The xml to be in the following form:
  * <azkaban-validators>
- *   <validator classname="validator class name">
- *     <!-- optional configurations for each individual validator -->
- *     <property key="validator property key" value="validator property value" />
- *     ...
- *   </validator>
+ *   <validator classname="validator class name">
+ *     <!-- optional configurations for each individual validator -->
+ *     <property key="validator property key" value="validator property value" />
+ *     ...
+ *   </validator>
  * </azkaban-validators>
  */
 public class XmlValidatorManager implements ValidatorManager {
-  private static final Logger logger = Logger.getLogger(XmlValidatorManager.class);
 
   public static final String AZKABAN_VALIDATOR_TAG = "azkaban-validators";
   public static final String VALIDATOR_TAG = "validator";
   public static final String CLASSNAME_ATTR = "classname";
   public static final String ITEM_TAG = "property";
   public static final String DEFAULT_VALIDATOR_KEY = "Directory Flow";
-
-  private static Map<String, Long> resourceTimestamps = new HashMap<String, Long>();
+  private static final Logger logger = Logger.getLogger(XmlValidatorManager.class);
+  private static final Map<String, Long> resourceTimestamps = new HashMap<>();
   private static ValidatorClassLoader validatorLoader;
-
+  private final String validatorDirPath;
   private Map<String, ProjectValidator> validators;
-  private String validatorDirPath;
 
   /**
    * Load the validator plugins from the validator directory (default being validators/) into
    * the validator ClassLoader. This enables creating instances of these validators in the
    * loadValidators() method.
-   *
-   * @param props
    */
-  public XmlValidatorManager(Props props) {
-    validatorDirPath = props.getString(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, ValidatorConfigs.DEFAULT_VALIDATOR_DIR);
-    File validatorDir = new File(validatorDirPath);
+  public XmlValidatorManager(final Props props) {
+    this.validatorDirPath = props
+        .getString(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, ValidatorConfigs.DEFAULT_VALIDATOR_DIR);
+    final File validatorDir = new File(this.validatorDirPath);
     if (!validatorDir.canRead() || !validatorDir.isDirectory()) {
-      logger.warn("Validator directory " + validatorDirPath
+      logger.warn("Validator directory " + this.validatorDirPath
           + " does not exist or is not a directory.");
     }
 
@@ -76,19 +70,19 @@ public class XmlValidatorManager implements ValidatorManager {
     // Load the validators specified in the xml file.
     try {
       loadValidators(props, logger);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("Cannot load all the validators.");
       throw new ValidatorManagerException(e);
     }
   }
 
   private void checkResources() {
-    File validatorDir = new File(validatorDirPath);
-    List<URL> resources = new ArrayList<URL>();
+    final File validatorDir = new File(this.validatorDirPath);
+    final List<URL> resources = new ArrayList<>();
     boolean reloadResources = false;
     try {
       if (validatorDir.canRead() && validatorDir.isDirectory()) {
-        for (File f : validatorDir.listFiles()) {
+        for (final File f : validatorDir.listFiles()) {
           if (f.getName().endsWith(".jar")) {
             resources.add(f.toURI().toURL());
             if (resourceTimestamps.get(f.getName()) == null
@@ -100,17 +94,17 @@ public class XmlValidatorManager implements ValidatorManager {
           }
         }
       }
-    } catch (MalformedURLException e) {
+    } catch (final MalformedURLException e) {
       throw new ValidatorManagerException(e);
     }
 
     if (reloadResources) {
       if (validatorLoader != null) {
         try {
-        // Since we cannot use Java 7 feature inside Azkaban (....), we need a customized class loader
-        // that does the close for us.
+          // Since we cannot use Java 7 feature inside Azkaban (....), we need a customized class loader
+          // that does the close for us.
           validatorLoader.close();
-        } catch (ValidatorManagerException e) {
+        } catch (final ValidatorManagerException e) {
           logger.error("Cannot reload validator classloader because failure "
               + "to close the validator classloader.", e);
           // We do not throw the ValidatorManagerException because we do not want to crash Azkaban at runtime.
@@ -122,38 +116,41 @@ public class XmlValidatorManager implements ValidatorManager {
 
   /**
    * Instances of the validators are created here rather than in the constructors. This is because
-   * some validators might need to maintain project-specific states, such as {@link DirectoryFlowLoader}.
-   * By instantiating the validators here, it ensures that the validator objects are project-specific,
-   * rather than global.
+   * some validators might need to maintain project-specific states, such as {@link
+   * DirectoryFlowLoader}. By instantiating the validators here, it ensures that the validator
+   * objects are project-specific, rather than global.
    *
    * {@inheritDoc}
-   * @see azkaban.project.validator.ValidatorManager#loadValidators(azkaban.utils.Props, org.apache.log4j.Logger)
+   *
+   * @see azkaban.project.validator.ValidatorManager#loadValidators(azkaban.utils.Props,
+   * org.apache.log4j.Logger)
    */
   @Override
-  public void loadValidators(Props props, Logger log) {
-    validators = new LinkedHashMap<String, ProjectValidator>();
+  public void loadValidators(final Props props, final Logger log) {
+    this.validators = new LinkedHashMap<>();
     // Add the default validator
-    DirectoryFlowLoader flowLoader = new DirectoryFlowLoader(props, log);
-    validators.put(flowLoader.getValidatorName(), flowLoader);
+    final DirectoryFlowLoader flowLoader = new DirectoryFlowLoader(props, log);
+    this.validators.put(flowLoader.getValidatorName(), flowLoader);
 
     if (!props.containsKey(ValidatorConfigs.XML_FILE_PARAM)) {
-      logger.warn("Azkaban properties file does not contain the key " + ValidatorConfigs.XML_FILE_PARAM);
+      logger.warn(
+          "Azkaban properties file does not contain the key " + ValidatorConfigs.XML_FILE_PARAM);
       return;
     }
-    String xmlPath = props.get(ValidatorConfigs.XML_FILE_PARAM);
-    File file = new File(xmlPath);
+    final String xmlPath = props.get(ValidatorConfigs.XML_FILE_PARAM);
+    final File file = new File(xmlPath);
     if (!file.exists()) {
       logger.error("Azkaban validator configuration file " + xmlPath + " does not exist.");
       return;
     }
 
     // Creating the document builder to parse xml.
-    DocumentBuilderFactory docBuilderFactory =
+    final DocumentBuilderFactory docBuilderFactory =
         DocumentBuilderFactory.newInstance();
     DocumentBuilder builder = null;
     try {
       builder = docBuilderFactory.newDocumentBuilder();
-    } catch (ParserConfigurationException e) {
+    } catch (final ParserConfigurationException e) {
       throw new ValidatorManagerException(
           "Exception while parsing validator xml. Document builder not created.", e);
     }
@@ -161,20 +158,20 @@ public class XmlValidatorManager implements ValidatorManager {
     Document doc = null;
     try {
       doc = builder.parse(file);
-    } catch (SAXException e) {
+    } catch (final SAXException e) {
       throw new ValidatorManagerException("Exception while parsing " + xmlPath
           + ". Invalid XML.", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ValidatorManagerException("Exception while parsing " + xmlPath
           + ". Error reading file.", e);
     }
 
-    NodeList tagList = doc.getChildNodes();
-    Node azkabanValidators = tagList.item(0);
+    final NodeList tagList = doc.getChildNodes();
+    final Node azkabanValidators = tagList.item(0);
 
-    NodeList azkabanValidatorsList = azkabanValidators.getChildNodes();
+    final NodeList azkabanValidatorsList = azkabanValidators.getChildNodes();
     for (int i = 0; i < azkabanValidatorsList.getLength(); ++i) {
-      Node node = azkabanValidatorsList.item(i);
+      final Node node = azkabanValidatorsList.item(i);
       if (node.getNodeType() == Node.ELEMENT_NODE) {
         if (node.getNodeName().equals(VALIDATOR_TAG)) {
           parseValidatorTag(node, props, log);
@@ -183,42 +180,41 @@ public class XmlValidatorManager implements ValidatorManager {
     }
   }
 
-  @SuppressWarnings("unchecked")
-  private void parseValidatorTag(Node node, Props props, Logger log) {
-    NamedNodeMap validatorAttrMap = node.getAttributes();
-    Node classNameAttr = validatorAttrMap.getNamedItem(CLASSNAME_ATTR);
+  private void parseValidatorTag(final Node node, final Props props, final Logger log) {
+    final NamedNodeMap validatorAttrMap = node.getAttributes();
+    final Node classNameAttr = validatorAttrMap.getNamedItem(CLASSNAME_ATTR);
     if (classNameAttr == null) {
       throw new ValidatorManagerException(
           "Error loading validator. The validator 'classname' attribute doesn't exist");
     }
 
-    NodeList keyValueItemsList = node.getChildNodes();
+    final NodeList keyValueItemsList = node.getChildNodes();
     for (int i = 0; i < keyValueItemsList.getLength(); i++) {
-      Node keyValuePair = keyValueItemsList.item(i);
+      final Node keyValuePair = keyValueItemsList.item(i);
       if (keyValuePair.getNodeName().equals(ITEM_TAG)) {
         parseItemTag(keyValuePair, props);
       }
     }
-    String className = classNameAttr.getNodeValue();
+    final String className = classNameAttr.getNodeValue();
     try {
-      Class<? extends ProjectValidator> validatorClass =
-          (Class<? extends ProjectValidator>)validatorLoader.loadClass(className);
-      Constructor<?> validatorConstructor =
+      final Class<? extends ProjectValidator> validatorClass =
+          (Class<? extends ProjectValidator>) validatorLoader.loadClass(className);
+      final Constructor<?> validatorConstructor =
           validatorClass.getConstructor(Logger.class);
-      ProjectValidator validator = (ProjectValidator) validatorConstructor.newInstance(log);
+      final ProjectValidator validator = (ProjectValidator) validatorConstructor.newInstance(log);
       validator.initialize(props);
-      validators.put(validator.getValidatorName(), validator);
+      this.validators.put(validator.getValidatorName(), validator);
       logger.info("Added validator " + className + " to list of validators.");
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("Could not instantiate ProjectValidator " + className);
       throw new ValidatorManagerException(e);
     }
   }
 
-  private void parseItemTag(Node node, Props props) {
-    NamedNodeMap keyValueMap = node.getAttributes();
-    Node keyAttr = keyValueMap.getNamedItem("key");
-    Node valueAttr = keyValueMap.getNamedItem("value");
+  private void parseItemTag(final Node node, final Props props) {
+    final NamedNodeMap keyValueMap = node.getAttributes();
+    final Node keyAttr = keyValueMap.getNamedItem("key");
+    final Node valueAttr = keyValueMap.getNamedItem("value");
     if (keyAttr == null || valueAttr == null) {
       throw new ValidatorManagerException("Error loading validator key/value "
           + "pair. The 'key' or 'value' attribute doesn't exist");
@@ -227,9 +223,9 @@ public class XmlValidatorManager implements ValidatorManager {
   }
 
   @Override
-  public Map<String, ValidationReport> validate(Project project, File projectDir) {
-    Map<String, ValidationReport> reports = new LinkedHashMap<String, ValidationReport>();
-    for (Entry<String, ProjectValidator> validator : validators.entrySet()) {
+  public Map<String, ValidationReport> validate(final Project project, final File projectDir) {
+    final Map<String, ValidationReport> reports = new LinkedHashMap<>();
+    for (final Entry<String, ProjectValidator> validator : this.validators.entrySet()) {
       reports.put(validator.getKey(), validator.getValue().validateProject(project, projectDir));
       logger.info("Validation status of validator " + validator.getKey() + " is "
           + reports.get(validator.getKey()).getStatus());
@@ -239,13 +235,13 @@ public class XmlValidatorManager implements ValidatorManager {
 
   @Override
   public ProjectValidator getDefaultValidator() {
-    return validators.get(DEFAULT_VALIDATOR_KEY);
+    return this.validators.get(DEFAULT_VALIDATOR_KEY);
   }
 
   @Override
   public List<String> getValidatorsInfo() {
-    List<String> info = new ArrayList<String>();
-    for (String key : validators.keySet()) {
+    final List<String> info = new ArrayList<>();
+    for (final String key : this.validators.keySet()) {
       info.add(key);
     }
     return info;
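
The reflective contract that parseValidatorTag() enforces on configured validators can be summarized in a short helper sketch (the named class is assumed to implement ProjectValidator and expose a public constructor taking a Logger):

    import azkaban.project.validator.ProjectValidator;
    import org.apache.log4j.Logger;

    public class ReflectiveLoadSketch {

      // Mirrors what parseValidatorTag() does with the 'classname' attribute.
      static ProjectValidator instantiate(final String className, final Logger log) throws Exception {
        final Class<? extends ProjectValidator> clazz =
            Class.forName(className).asSubclass(ProjectValidator.class);
        return clazz.getConstructor(Logger.class).newInstance(log);
      }
    }
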
diff --git a/azkaban-common/src/main/java/azkaban/scheduler/Schedule.java b/azkaban-common/src/main/java/azkaban/scheduler/Schedule.java
index 2a2018c..d495059 100644
--- a/azkaban-common/src/main/java/azkaban/scheduler/Schedule.java
+++ b/azkaban-common/src/main/java/azkaban/scheduler/Schedule.java
@@ -20,13 +20,11 @@ import azkaban.executor.ExecutionOptions;
 import azkaban.sla.SlaOption;
 import azkaban.utils.Pair;
 import azkaban.utils.Utils;
-
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Days;
@@ -41,51 +39,52 @@ import org.quartz.CronExpression;
 
 public class Schedule {
 
+  private final int projectId;
+  private final String projectName;
+  private final String flowName;
+  private final long firstSchedTime;
+  private final DateTimeZone timezone;
+  private final long lastModifyTime;
+  private final ReadablePeriod period;
+  private final String submitUser;
+  private final String status;
+  private final long submitTime;
+  private final String cronExpression;
+  private final boolean skipPastOccurrences = true;
   private int scheduleId;
-  private int projectId;
-  private String projectName;
-  private String flowName;
-  private long firstSchedTime;
-  private DateTimeZone timezone;
-  private long lastModifyTime;
-  private ReadablePeriod period;
   private long nextExecTime;
-  private String submitUser;
-  private String status;
-  private long submitTime;
-  private String cronExpression;
-
-  private boolean skipPastOccurrences = true;
-
   private ExecutionOptions executionOptions;
   private List<SlaOption> slaOptions;
 
-  public Schedule(int scheduleId, int projectId, String projectName,
-      String flowName, String status, long firstSchedTime,
-      DateTimeZone timezone, ReadablePeriod period, long lastModifyTime,
-      long nextExecTime, long submitTime, String submitUser) {
+  public Schedule(final int scheduleId, final int projectId, final String projectName,
+      final String flowName, final String status, final long firstSchedTime,
+      final DateTimeZone timezone, final ReadablePeriod period, final long lastModifyTime,
+      final long nextExecTime, final long submitTime, final String submitUser) {
 
     this(scheduleId, projectId, projectName, flowName, status, firstSchedTime,
         timezone, period, lastModifyTime, nextExecTime, submitTime, submitUser,
         null, null, null);
   }
 
-  public Schedule(int scheduleId, int projectId, String projectName,
-      String flowName, String status, long firstSchedTime, String timezoneId,
-      String period, long lastModifyTime, long nextExecTime, long submitTime,
-      String submitUser, ExecutionOptions executionOptions,
-      List<SlaOption> slaOptions) {
+  public Schedule(final int scheduleId, final int projectId, final String projectName,
+      final String flowName, final String status, final long firstSchedTime,
+      final String timezoneId,
+      final String period, final long lastModifyTime, final long nextExecTime,
+      final long submitTime,
+      final String submitUser, final ExecutionOptions executionOptions,
+      final List<SlaOption> slaOptions) {
     this(scheduleId, projectId, projectName, flowName, status, firstSchedTime,
         DateTimeZone.forID(timezoneId), parsePeriodString(period),
         lastModifyTime, nextExecTime, submitTime, submitUser, executionOptions,
         slaOptions, null);
   }
 
-  public Schedule(int scheduleId, int projectId, String projectName,
-      String flowName, String status, long firstSchedTime,
-      DateTimeZone timezone, ReadablePeriod period, long lastModifyTime,
-      long nextExecTime, long submitTime, String submitUser,
-      ExecutionOptions executionOptions, List<SlaOption> slaOptions, String cronExpression) {
+  public Schedule(final int scheduleId, final int projectId, final String projectName,
+      final String flowName, final String status, final long firstSchedTime,
+      final DateTimeZone timezone, final ReadablePeriod period, final long lastModifyTime,
+      final long nextExecTime, final long submitTime, final String submitUser,
+      final ExecutionOptions executionOptions, final List<SlaOption> slaOptions,
+      final String cronExpression) {
     this.scheduleId = scheduleId;
     this.projectId = projectId;
     this.projectName = projectName;
@@ -103,113 +102,187 @@ public class Schedule {
     this.cronExpression = cronExpression;
   }
 
+  public static ReadablePeriod parsePeriodString(final String periodStr) {
+    final ReadablePeriod period;
+    final char periodUnit = periodStr.charAt(periodStr.length() - 1);
+    if (periodUnit == 'n') {
+      return null;
+    }
+
+    final int periodInt =
+        Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
+    switch (periodUnit) {
+      case 'M':
+        period = Months.months(periodInt);
+        break;
+      case 'w':
+        period = Weeks.weeks(periodInt);
+        break;
+      case 'd':
+        period = Days.days(periodInt);
+        break;
+      case 'h':
+        period = Hours.hours(periodInt);
+        break;
+      case 'm':
+        period = Minutes.minutes(periodInt);
+        break;
+      case 's':
+        period = Seconds.seconds(periodInt);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid schedule period unit '"
+            + periodUnit + "'");
+    }
+
+    return period;
+  }
+
+  public static String createPeriodString(final ReadablePeriod period) {
+    String periodStr = "n";
+
+    if (period == null) {
+      return "n";
+    }
+
+    if (period.get(DurationFieldType.months()) > 0) {
+      final int months = period.get(DurationFieldType.months());
+      periodStr = months + "M";
+    } else if (period.get(DurationFieldType.weeks()) > 0) {
+      final int weeks = period.get(DurationFieldType.weeks());
+      periodStr = weeks + "w";
+    } else if (period.get(DurationFieldType.days()) > 0) {
+      final int days = period.get(DurationFieldType.days());
+      periodStr = days + "d";
+    } else if (period.get(DurationFieldType.hours()) > 0) {
+      final int hours = period.get(DurationFieldType.hours());
+      periodStr = hours + "h";
+    } else if (period.get(DurationFieldType.minutes()) > 0) {
+      final int minutes = period.get(DurationFieldType.minutes());
+      periodStr = minutes + "m";
+    } else if (period.get(DurationFieldType.seconds()) > 0) {
+      final int seconds = period.get(DurationFieldType.seconds());
+      periodStr = seconds + "s";
+    }
+
+    return periodStr;
+  }
+
   public ExecutionOptions getExecutionOptions() {
-    return executionOptions;
+    return this.executionOptions;
   }
 
   public List<SlaOption> getSlaOptions() {
-    return slaOptions;
+    return this.slaOptions;
   }
 
-  public void setFlowOptions(ExecutionOptions executionOptions) {
-    this.executionOptions = executionOptions;
+  public void setSlaOptions(final List<SlaOption> slaOptions) {
+    this.slaOptions = slaOptions;
   }
 
-  public void setSlaOptions(List<SlaOption> slaOptions) {
-    this.slaOptions = slaOptions;
+  public void setFlowOptions(final ExecutionOptions executionOptions) {
+    this.executionOptions = executionOptions;
   }
 
   public String getScheduleName() {
-    return projectName + "." + flowName + " (" + projectId + ")";
+    return this.projectName + "." + this.flowName + " (" + this.projectId + ")";
   }
 
   @Override
   public String toString() {
 
-    String underlying = projectName + "." + flowName + " (" + projectId + ")" + " to be run at (starting) " + new DateTime(
-        firstSchedTime).toDateTimeISO();
-    if (period == null && cronExpression == null) {
+    final String underlying =
+        this.projectName + "." + this.flowName + " (" + this.projectId + ")"
+            + " to be run at (starting) "
+            + new DateTime(
+            this.firstSchedTime).toDateTimeISO();
+    if (this.period == null && this.cronExpression == null) {
       return underlying + " non-recurring";
-    } else if (cronExpression != null) {
-      return underlying + " with CronExpression {" + cronExpression + "}";
+    } else if (this.cronExpression != null) {
+      return underlying + " with CronExpression {" + this.cronExpression + "}";
     } else {
-      return underlying + " with precurring period of " + createPeriodString(period);
+      return underlying + " with precurring period of " + createPeriodString(this.period);
     }
   }
 
   public Pair<Integer, String> getScheduleIdentityPair() {
-    return new Pair<Integer, String>(getProjectId(), getFlowName());
+    return new Pair<>(getProjectId(), getFlowName());
   }
 
-  public void setScheduleId(int scheduleId) {
-    this.scheduleId = scheduleId;
+  public int getScheduleId() {
+    return this.scheduleId;
   }
 
-  public int getScheduleId() {
-    return scheduleId;
+  public void setScheduleId(final int scheduleId) {
+    this.scheduleId = scheduleId;
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
   public String getProjectName() {
-    return projectName;
+    return this.projectName;
   }
 
   public String getFlowName() {
-    return flowName;
+    return this.flowName;
   }
 
   public long getFirstSchedTime() {
-    return firstSchedTime;
+    return this.firstSchedTime;
   }
 
   public DateTimeZone getTimezone() {
-    return timezone;
+    return this.timezone;
   }
 
   public long getLastModifyTime() {
-    return lastModifyTime;
+    return this.lastModifyTime;
   }
 
   public ReadablePeriod getPeriod() {
-    return period;
+    return this.period;
   }
 
   public long getNextExecTime() {
-    return nextExecTime;
+    return this.nextExecTime;
+  }
+
+  public void setNextExecTime(final long nextExecTime) {
+    this.nextExecTime = nextExecTime;
   }
 
   public String getSubmitUser() {
-    return submitUser;
+    return this.submitUser;
   }
 
   public String getStatus() {
-    return status;
+    return this.status;
   }
 
   public long getSubmitTime() {
-    return submitTime;
+    return this.submitTime;
   }
 
   public String getCronExpression() {
-    return cronExpression;
+    return this.cronExpression;
   }
 
   public boolean updateTime() {
-    if (new DateTime(nextExecTime).isAfterNow()) {
+    if (new DateTime(this.nextExecTime).isAfterNow()) {
       return true;
     }
 
-    if (cronExpression != null) {
-      DateTime nextTime = getNextCronRuntime(nextExecTime, timezone, Utils.parseCronExpression(cronExpression, timezone));
+    if (this.cronExpression != null) {
+      final DateTime nextTime = getNextCronRuntime(this.nextExecTime, this.timezone,
+          Utils.parseCronExpression(this.cronExpression, this.timezone));
       this.nextExecTime = nextTime.getMillis();
       return true;
     }
 
-    if (period != null) {
-      DateTime nextTime = getNextRuntime(nextExecTime, timezone, period);
+    if (this.period != null) {
+      final DateTime nextTime = getNextRuntime(this.nextExecTime, this.timezone, this.period);
 
       this.nextExecTime = nextTime.getMillis();
       return true;
@@ -218,13 +291,9 @@ public class Schedule {
     return false;
   }
 
-  public void setNextExecTime(long nextExecTime) {
-    this.nextExecTime = nextExecTime;
-  }
-
-  private DateTime getNextRuntime(long scheduleTime, DateTimeZone timezone,
-      ReadablePeriod period) {
-    DateTime now = new DateTime();
+  private DateTime getNextRuntime(final long scheduleTime, final DateTimeZone timezone,
+      final ReadablePeriod period) {
+    final DateTime now = new DateTime();
     DateTime date = new DateTime(scheduleTime).withZone(timezone);
     int count = 0;
     while (!now.isBefore(date)) {
@@ -246,14 +315,13 @@ public class Schedule {
   }
 
   /**
-   *
-   * @param scheduleTime represents the time when Schedule Servlet receives the Cron Schedule API call.
+   * @param scheduleTime represents the time when Schedule Servlet receives the Cron Schedule API
+   * call.
    * @param timezone is always UTC (after 3.1.0)
-   * @param ce
    * @return the First Scheduled DateTime to run this flow.
    */
-  private DateTime getNextCronRuntime(long scheduleTime, DateTimeZone timezone,
-      CronExpression ce) {
+  private DateTime getNextCronRuntime(final long scheduleTime, final DateTimeZone timezone,
+      final CronExpression ce) {
 
     Date date = new DateTime(scheduleTime).withZone(timezone).toDate();
     if (ce != null) {
@@ -262,83 +330,17 @@ public class Schedule {
     return new DateTime(date);
   }
 
-  public static ReadablePeriod parsePeriodString(String periodStr) {
-    ReadablePeriod period;
-    char periodUnit = periodStr.charAt(periodStr.length() - 1);
-    if (periodUnit == 'n') {
-      return null;
-    }
-
-    int periodInt =
-        Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
-    switch (periodUnit) {
-    case 'M':
-      period = Months.months(periodInt);
-      break;
-    case 'w':
-      period = Weeks.weeks(periodInt);
-      break;
-    case 'd':
-      period = Days.days(periodInt);
-      break;
-    case 'h':
-      period = Hours.hours(periodInt);
-      break;
-    case 'm':
-      period = Minutes.minutes(periodInt);
-      break;
-    case 's':
-      period = Seconds.seconds(periodInt);
-      break;
-    default:
-      throw new IllegalArgumentException("Invalid schedule period unit '"
-          + periodUnit);
-    }
-
-    return period;
-  }
-
-  public static String createPeriodString(ReadablePeriod period) {
-    String periodStr = "n";
-
-    if (period == null) {
-      return "n";
-    }
-
-    if (period.get(DurationFieldType.months()) > 0) {
-      int months = period.get(DurationFieldType.months());
-      periodStr = months + "M";
-    } else if (period.get(DurationFieldType.weeks()) > 0) {
-      int weeks = period.get(DurationFieldType.weeks());
-      periodStr = weeks + "w";
-    } else if (period.get(DurationFieldType.days()) > 0) {
-      int days = period.get(DurationFieldType.days());
-      periodStr = days + "d";
-    } else if (period.get(DurationFieldType.hours()) > 0) {
-      int hours = period.get(DurationFieldType.hours());
-      periodStr = hours + "h";
-    } else if (period.get(DurationFieldType.minutes()) > 0) {
-      int minutes = period.get(DurationFieldType.minutes());
-      periodStr = minutes + "m";
-    } else if (period.get(DurationFieldType.seconds()) > 0) {
-      int seconds = period.get(DurationFieldType.seconds());
-      periodStr = seconds + "s";
-    }
-
-    return periodStr;
-  }
-
   public Map<String, Object> optionsToObject() {
-    if (executionOptions != null) {
-      HashMap<String, Object> schedObj = new HashMap<String, Object>();
+    if (this.executionOptions != null) {
+      final HashMap<String, Object> schedObj = new HashMap<>();
 
-      if (executionOptions != null) {
-        schedObj.put("executionOptions", executionOptions.toObject());
+      if (this.executionOptions != null) {
+        schedObj.put("executionOptions", this.executionOptions.toObject());
       }
 
-      if (slaOptions != null) {
-        List<Object> slaOptionsObject = new ArrayList<Object>();
-        for (SlaOption sla : slaOptions) {
+      if (this.slaOptions != null) {
+        final List<Object> slaOptionsObject = new ArrayList<>();
+        for (final SlaOption sla : this.slaOptions) {
           slaOptionsObject.add(sla.toObject());
         }
         schedObj.put("slaOptions", slaOptionsObject);
@@ -349,15 +351,14 @@ public class Schedule {
     return null;
   }
 
-  @SuppressWarnings("unchecked")
-  public void createAndSetScheduleOptions(Object obj) {
-    HashMap<String, Object> schedObj = (HashMap<String, Object>) obj;
+  public void createAndSetScheduleOptions(final Object obj) {
+    final HashMap<String, Object> schedObj = (HashMap<String, Object>) obj;
     if (schedObj.containsKey("executionOptions")) {
-      ExecutionOptions execOptions =
+      final ExecutionOptions execOptions =
           ExecutionOptions.createFromObject(schedObj.get("executionOptions"));
       this.executionOptions = execOptions;
     } else if (schedObj.containsKey("flowOptions")) {
-      ExecutionOptions execOptions =
+      final ExecutionOptions execOptions =
           ExecutionOptions.createFromObject(schedObj.get("flowOptions"));
       this.executionOptions = execOptions;
       execOptions.setConcurrentOption(ExecutionOptions.CONCURRENT_OPTION_SKIP);
@@ -368,9 +369,9 @@ public class Schedule {
     }
 
     if (schedObj.containsKey("slaOptions")) {
-      List<Object> slaOptionsObject = (List<Object>) schedObj.get("slaOptions");
-      List<SlaOption> slaOptions = new ArrayList<SlaOption>();
-      for (Object slaObj : slaOptionsObject) {
+      final List<Object> slaOptionsObject = (List<Object>) schedObj.get("slaOptions");
+      final List<SlaOption> slaOptions = new ArrayList<>();
+      for (final Object slaObj : slaOptionsObject) {
         slaOptions.add(SlaOption.fromObject(slaObj));
       }
       this.slaOptions = slaOptions;
@@ -379,11 +380,11 @@ public class Schedule {
   }
 
   public boolean isRecurring() {
-    return period != null || cronExpression != null;
+    return this.period != null || this.cronExpression != null;
   }
 
   public boolean skipPastOccurrences() {
-    return skipPastOccurrences;
+    return this.skipPastOccurrences;
   }
 
 }
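
For reference, the parsePeriodString/createPeriodString helpers moved above encode a Joda-Time ReadablePeriod as a compact string: "n" means non-recurring, otherwise a count followed by a unit character (M/w/d/h/m/s). A minimal usage sketch, illustrative only and not part of this commit:

import azkaban.scheduler.Schedule;
import org.joda.time.Days;
import org.joda.time.ReadablePeriod;

// Illustrative only -- exercises the two public static helpers shown in the diff above.
public class PeriodStringDemo {
  public static void main(final String[] args) {
    final ReadablePeriod twoDays = Schedule.parsePeriodString("2d"); // -> Days.days(2)
    System.out.println(Schedule.createPeriodString(twoDays));        // prints "2d"
    System.out.println(Schedule.createPeriodString(Days.days(3)));   // prints "3d"
    System.out.println(Schedule.createPeriodString(null));           // prints "n"
    System.out.println(Schedule.parsePeriodString("n"));             // prints null
  }
}

Note that createPeriodString emits only the largest non-zero field (months before weeks, and so on), so mixed periods are encoded lossily.
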
diff --git a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManager.java b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManager.java
index e6c1923..60cfe2b 100644
--- a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManager.java
+++ b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManager.java
@@ -16,25 +16,22 @@
 
 package azkaban.scheduler;
 
+import azkaban.executor.ExecutionOptions;
+import azkaban.sla.SlaOption;
+import azkaban.trigger.TriggerAgent;
+import azkaban.trigger.TriggerStatus;
+import azkaban.utils.Pair;
+import azkaban.utils.Props;
 import java.util.ArrayList;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.log4j.Logger;
-
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadablePeriod;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 
-import azkaban.executor.ExecutionOptions;
-import azkaban.sla.SlaOption;
-import azkaban.trigger.TriggerAgent;
-import azkaban.trigger.TriggerStatus;
-import azkaban.utils.Pair;
-import azkaban.utils.Props;
-
 /**
  * The ScheduleManager stores and executes the schedule. It uses a single thread
  * instead and waits until correct loading time for the flow. It will not remove
@@ -44,24 +41,23 @@ import azkaban.utils.Props;
  * TODO kunkun-tang: When new AZ quartz Scheduler comes, we will remove this class.
  */
 public class ScheduleManager implements TriggerAgent {
-  private static Logger logger = Logger.getLogger(ScheduleManager.class);
 
   public static final String triggerSource = "SimpleTimeTrigger";
+  private static final Logger logger = Logger.getLogger(ScheduleManager.class);
   private final DateTimeFormatter _dateFormat = DateTimeFormat
       .forPattern("MM-dd-yyyy HH:mm:ss:SSS");
-  private ScheduleLoader loader;
+  private final ScheduleLoader loader;
 
-  private Map<Integer, Schedule> scheduleIDMap =
+  private final Map<Integer, Schedule> scheduleIDMap =
       new LinkedHashMap<>();
-  private Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap =
+  private final Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap =
       new LinkedHashMap<>();
 
   /**
    * Give the schedule manager a loader class that will properly load the
    * schedule.
-   *
    */
-  public ScheduleManager(ScheduleLoader loader) {
+  public ScheduleManager(final ScheduleLoader loader) {
     this.loader = loader;
   }
 
@@ -74,8 +70,8 @@ public class ScheduleManager implements TriggerAgent {
 
   // only do this when using external runner
   private synchronized void updateLocal() throws ScheduleManagerException {
-    List<Schedule> updates = loader.loadUpdatedSchedules();
-    for (Schedule s : updates) {
+    final List<Schedule> updates = this.loader.loadUpdatedSchedules();
+    for (final Schedule s : updates) {
       if (s.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
         onScheduleExpire(s);
       } else {
@@ -84,7 +80,7 @@ public class ScheduleManager implements TriggerAgent {
     }
   }
 
-  private void onScheduleExpire(Schedule s) {
+  private void onScheduleExpire(final Schedule s) {
     removeSchedule(s);
   }
 
@@ -99,23 +95,21 @@ public class ScheduleManager implements TriggerAgent {
 
   /**
    * Retrieves a copy of the list of schedules.
-   *
    */
   public synchronized List<Schedule> getSchedules()
       throws ScheduleManagerException {
 
     updateLocal();
-    return new ArrayList<>(scheduleIDMap.values());
+    return new ArrayList<>(this.scheduleIDMap.values());
   }
 
   /**
    * Returns the scheduled flow for the flow name
-   *
    */
-  public Schedule getSchedule(int projectId, String flowId)
+  public Schedule getSchedule(final int projectId, final String flowId)
       throws ScheduleManagerException {
     updateLocal();
-    return scheduleIdentityPairMap.get(new Pair<>(projectId,
+    return this.scheduleIdentityPairMap.get(new Pair<>(projectId,
         flowId));
   }
 
@@ -124,54 +118,54 @@ public class ScheduleManager implements TriggerAgent {
    *
    * @param scheduleId Schedule ID
    */
-  public Schedule getSchedule(int scheduleId) throws ScheduleManagerException {
+  public Schedule getSchedule(final int scheduleId) throws ScheduleManagerException {
     updateLocal();
-    return scheduleIDMap.get(scheduleId);
+    return this.scheduleIDMap.get(scheduleId);
   }
 
 
   /**
    * Removes the flow from the schedule if it exists.
-   *
    */
-  public synchronized void removeSchedule(Schedule sched) {
-    Pair<Integer, String> identityPairMap = sched.getScheduleIdentityPair();
+  public synchronized void removeSchedule(final Schedule sched) {
+    final Pair<Integer, String> identityPairMap = sched.getScheduleIdentityPair();
 
-    Schedule schedule = scheduleIdentityPairMap.get(identityPairMap);
+    final Schedule schedule = this.scheduleIdentityPairMap.get(identityPairMap);
     if (schedule != null) {
-      scheduleIdentityPairMap.remove(identityPairMap);
+      this.scheduleIdentityPairMap.remove(identityPairMap);
     }
 
-    scheduleIDMap.remove(sched.getScheduleId());
+    this.scheduleIDMap.remove(sched.getScheduleId());
 
     try {
-      loader.removeSchedule(sched);
-    } catch (ScheduleManagerException e) {
+      this.loader.removeSchedule(sched);
+    } catch (final ScheduleManagerException e) {
       logger.error(e);
     }
   }
 
   public Schedule scheduleFlow(final int scheduleId,
-                               final int projectId,
-                               final String projectName,
-                               final String flowName,
-                               final String status,
-                               final long firstSchedTime,
-                               final DateTimeZone timezone,
-                               final ReadablePeriod period,
-                               final long lastModifyTime,
-                               final long nextExecTime,
-                               final long submitTime,
-                               final String submitUser,
-                               ExecutionOptions execOptions,
-                               List<SlaOption> slaOptions) {
-    Schedule sched =
+      final int projectId,
+      final String projectName,
+      final String flowName,
+      final String status,
+      final long firstSchedTime,
+      final DateTimeZone timezone,
+      final ReadablePeriod period,
+      final long lastModifyTime,
+      final long nextExecTime,
+      final long submitTime,
+      final String submitUser,
+      final ExecutionOptions execOptions,
+      final List<SlaOption> slaOptions) {
+    final Schedule sched =
         new Schedule(scheduleId, projectId, projectName, flowName, status,
             firstSchedTime, timezone, period, lastModifyTime, nextExecTime,
             submitTime, submitUser, execOptions, slaOptions, null);
     logger
         .info("Scheduling flow '" + sched.getScheduleName() + "' for "
-            + _dateFormat.print(firstSchedTime) + " with a period of " + (period == null ? "(non-recurring)"
+            + this._dateFormat.print(firstSchedTime) + " with a period of " + (period == null
+            ? "(non-recurring)"
             : period));
 
     insertSchedule(sched);
@@ -183,42 +177,44 @@ public class ScheduleManager implements TriggerAgent {
       final long firstSchedTime, final DateTimeZone timezone,
       final long lastModifyTime,
       final long nextExecTime, final long submitTime, final String submitUser,
-      ExecutionOptions execOptions, List<SlaOption> slaOptions, String cronExpression) {
-    Schedule sched =
+      final ExecutionOptions execOptions, final List<SlaOption> slaOptions,
+      final String cronExpression) {
+    final Schedule sched =
         new Schedule(scheduleId, projectId, projectName, flowName, status,
             firstSchedTime, timezone, null, lastModifyTime, nextExecTime,
             submitTime, submitUser, execOptions, slaOptions, cronExpression);
     logger
         .info("Scheduling flow '" + sched.getScheduleName() + "' for "
-            + _dateFormat.print(firstSchedTime) + " cron Expression = " + cronExpression);
+            + this._dateFormat.print(firstSchedTime) + " cron Expression = " + cronExpression);
 
     insertSchedule(sched);
     return sched;
   }
+
   /**
    * Schedules the flow, but doesn't save the schedule afterwards.
    */
-  private synchronized void internalSchedule(Schedule s) {
-    scheduleIDMap.put(s.getScheduleId(), s);
-    scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
+  private synchronized void internalSchedule(final Schedule s) {
+    this.scheduleIDMap.put(s.getScheduleId(), s);
+    this.scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
   }
 
   /**
    * Adds a flow to the schedule.
    */
-  public synchronized void insertSchedule(Schedule s) {
-    Schedule exist = scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
+  public synchronized void insertSchedule(final Schedule s) {
+    final Schedule exist = this.scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
     if (s.updateTime()) {
       try {
         if (exist == null) {
-          loader.insertSchedule(s);
+          this.loader.insertSchedule(s);
           internalSchedule(s);
         } else {
           s.setScheduleId(exist.getScheduleId());
-          loader.updateSchedule(s);
+          this.loader.updateSchedule(s);
           internalSchedule(s);
         }
-      } catch (ScheduleManagerException e) {
+      } catch (final ScheduleManagerException e) {
         logger.error(e);
       }
     } else {
@@ -229,7 +225,7 @@ public class ScheduleManager implements TriggerAgent {
   }
 
   @Override
-  public void loadTriggerFromProps(Props props) throws ScheduleManagerException {
+  public void loadTriggerFromProps(final Props props) throws ScheduleManagerException {
     throw new ScheduleManagerException("create " + getTriggerSource()
         + " from json not supported yet");
   }
diff --git a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManagerException.java b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManagerException.java
index 8efe52e..87ad6e1 100644
--- a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleManagerException.java
@@ -17,17 +17,18 @@
 package azkaban.scheduler;
 
 public class ScheduleManagerException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public ScheduleManagerException(String message) {
+  public ScheduleManagerException(final String message) {
     super(message);
   }
 
-  public ScheduleManagerException(String message, Throwable cause) {
+  public ScheduleManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public ScheduleManagerException(Exception e) {
+  public ScheduleManagerException(final Exception e) {
     super(e);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
index cd60549..7bd39be 100644
--- a/azkaban-common/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
+++ b/azkaban-common/src/main/java/azkaban/scheduler/ScheduleStatisticManager.java
@@ -16,71 +16,70 @@
 
 package azkaban.scheduler;
 
+import azkaban.utils.JSONUtils;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.OutputStream;
 import java.util.HashMap;
 import java.util.Map;
 
-import azkaban.utils.JSONUtils;
-
 /**
  * TODO: This needs to be fleshed out and made into a proper singleton.
  */
 public class ScheduleStatisticManager {
+
   public static final int STAT_NUMBERS = 10;
 
-  private static HashMap<Integer, Object> cacheLock =
-      new HashMap<Integer, Object>();
+  private static final HashMap<Integer, Object> cacheLock =
+      new HashMap<>();
   private static File cacheDirectory;
 
-  public static void invalidateCache(int scheduleId, File cacheDir) {
+  public static void invalidateCache(final int scheduleId, final File cacheDir) {
     setCacheFolder(cacheDir);
     // This should be silent and not fail
     try {
-      Object lock = getLock(scheduleId);
+      final Object lock = getLock(scheduleId);
       synchronized (lock) {
         getCacheFile(scheduleId).delete();
       }
       unLock(scheduleId);
-    } catch (Exception e) {
+    } catch (final Exception e) {
     }
   }
 
-  public static void saveCache(int scheduleId, Map<String, Object> data) {
-    Object lock = getLock(scheduleId);
+  public static void saveCache(final int scheduleId, final Map<String, Object> data) {
+    final Object lock = getLock(scheduleId);
     try {
       synchronized (lock) {
-        File cache = getCacheFile(scheduleId);
+        final File cache = getCacheFile(scheduleId);
         cache.createNewFile();
-        OutputStream output = new FileOutputStream(cache);
+        final OutputStream output = new FileOutputStream(cache);
         try {
           JSONUtils.toJSON(data, output, false);
         } finally {
           output.close();
         }
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
     }
     unLock(scheduleId);
   }
 
-  public static Map<String, Object> loadCache(int scheduleId) {
-    Object lock = getLock(scheduleId);
+  public static Map<String, Object> loadCache(final int scheduleId) {
+    final Object lock = getLock(scheduleId);
     try {
       synchronized (lock) {
-        File cache = getCacheFile(scheduleId);
+        final File cache = getCacheFile(scheduleId);
         if (cache.exists() && cache.isFile()) {
-          Object dataObj = JSONUtils.parseJSONFromFile(cache);
+          final Object dataObj = JSONUtils.parseJSONFromFile(cache);
           if (dataObj instanceof Map<?, ?>) {
-            @SuppressWarnings("unchecked")
-            Map<String, Object> data = (Map<String, Object>) dataObj;
+            final Map<String, Object> data = (Map<String, Object>) dataObj;
             return data;
           }
         }
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       e.printStackTrace();
     }
     unLock(scheduleId);
@@ -91,13 +90,13 @@ public class ScheduleStatisticManager {
     return cacheDirectory;
   }
 
-  private static File getCacheFile(int scheduleId) {
+  private static File getCacheFile(final int scheduleId) {
     cacheDirectory.mkdirs();
-    File file = new File(cacheDirectory, scheduleId + ".cache");
+    final File file = new File(cacheDirectory, scheduleId + ".cache");
     return file;
   }
 
-  private static Object getLock(int scheduleId) {
+  private static Object getLock(final int scheduleId) {
     Object lock = null;
     synchronized (cacheLock) {
       lock = cacheLock.get(scheduleId);
@@ -110,13 +109,13 @@ public class ScheduleStatisticManager {
     return lock;
   }
 
-  private static void unLock(int scheduleId) {
+  private static void unLock(final int scheduleId) {
     synchronized (cacheLock) {
       cacheLock.remove(scheduleId);
     }
   }
 
-  public static void setCacheFolder(File cacheDir) {
+  public static void setCacheFolder(final File cacheDir) {
     if (cacheDirectory == null) {
       cacheDirectory = new File(cacheDir, "schedule-statistics");
     }
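
For context, the class above caches per-schedule statistics as JSON files named <scheduleId>.cache under a "schedule-statistics" folder, serializing each write behind a per-schedule lock. An illustrative usage sketch, not part of this commit:

import azkaban.scheduler.ScheduleStatisticManager;
import java.io.File;
import java.util.HashMap;
import java.util.Map;

// Illustrative only -- the schedule id and map contents are made up for the example.
public class ScheduleStatsCacheDemo {
  public static void main(final String[] args) {
    final File cacheDir = new File(System.getProperty("java.io.tmpdir"));
    ScheduleStatisticManager.setCacheFolder(cacheDir); // points the cache at <tmp>/schedule-statistics

    final Map<String, Object> stats = new HashMap<>();
    stats.put("averageRunTimeMs", 1234);
    ScheduleStatisticManager.saveCache(42, stats);              // writes 42.cache as JSON
    System.out.println(ScheduleStatisticManager.loadCache(42)); // reads it back (or null)
    ScheduleStatisticManager.invalidateCache(42, cacheDir);     // deletes 42.cache silently
  }
}
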
diff --git a/azkaban-common/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java b/azkaban-common/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
index 9f7d5a3..7483169 100644
--- a/azkaban-common/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
+++ b/azkaban-common/src/main/java/azkaban/scheduler/TriggerBasedScheduleLoader.java
@@ -16,13 +16,6 @@
 
 package azkaban.scheduler;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
 import azkaban.trigger.Condition;
 import azkaban.trigger.ConditionChecker;
 import azkaban.trigger.Trigger;
@@ -32,31 +25,36 @@ import azkaban.trigger.TriggerManagerAdapter;
 import azkaban.trigger.TriggerManagerException;
 import azkaban.trigger.builtin.BasicTimeChecker;
 import azkaban.trigger.builtin.ExecuteFlowAction;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.log4j.Logger;
 
 public class TriggerBasedScheduleLoader implements ScheduleLoader {
 
-  private static Logger logger = Logger
+  private static final Logger logger = Logger
       .getLogger(TriggerBasedScheduleLoader.class);
 
-  private TriggerManagerAdapter triggerManager;
+  private final TriggerManagerAdapter triggerManager;
 
-  private String triggerSource;
+  private final String triggerSource;
 
   private long lastUpdateTime = -1;
 
-  public TriggerBasedScheduleLoader(TriggerManager triggerManager,
-      String triggerSource) {
+  public TriggerBasedScheduleLoader(final TriggerManager triggerManager,
+      final String triggerSource) {
     this.triggerManager = triggerManager;
     this.triggerSource = triggerSource;
   }
 
-  private Trigger scheduleToTrigger(Schedule s) {
-    Condition triggerCondition = createTriggerCondition(s);
-    Condition expireCondition = createExpireCondition(s);
-    List<TriggerAction> actions = createActions(s);
+  private Trigger scheduleToTrigger(final Schedule s) {
+    final Condition triggerCondition = createTriggerCondition(s);
+    final Condition expireCondition = createExpireCondition(s);
+    final List<TriggerAction> actions = createActions(s);
 
-    Trigger t = new Trigger.TriggerBuilder(s.getSubmitUser(),
-        triggerSource,
+    final Trigger t = new Trigger.TriggerBuilder(s.getSubmitUser(),
+        this.triggerSource,
         triggerCondition,
         expireCondition,
         actions)
@@ -73,9 +71,9 @@ public class TriggerBasedScheduleLoader implements ScheduleLoader {
     return t;
   }
 
-  private List<TriggerAction> createActions(Schedule s) {
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    ExecuteFlowAction executeAct =
+  private List<TriggerAction> createActions(final Schedule s) {
+    final List<TriggerAction> actions = new ArrayList<>();
+    final ExecuteFlowAction executeAct =
         new ExecuteFlowAction("executeFlowAction", s.getProjectId(),
             s.getProjectName(), s.getFlowName(), s.getSubmitUser(),
             s.getExecutionOptions(), s.getSlaOptions());
@@ -84,50 +82,50 @@ public class TriggerBasedScheduleLoader implements ScheduleLoader {
     return actions;
   }
 
-  private Condition createTriggerCondition(Schedule s) {
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
-    ConditionChecker checker =
+  private Condition createTriggerCondition(final Schedule s) {
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
+    final ConditionChecker checker =
         new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(),
             s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
             s.getPeriod(), s.getCronExpression());
     checkers.put(checker.getId(), checker);
-    String expr = checker.getId() + ".eval()";
-    Condition cond = new Condition(checkers, expr);
+    final String expr = checker.getId() + ".eval()";
+    final Condition cond = new Condition(checkers, expr);
     return cond;
   }
 
   // if failed to trigger, auto expire?
-  private Condition createExpireCondition(Schedule s) {
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
-    ConditionChecker checker =
+  private Condition createExpireCondition(final Schedule s) {
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
+    final ConditionChecker checker =
         new BasicTimeChecker("BasicTimeChecker_2", s.getFirstSchedTime(),
             s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(),
             s.getPeriod(), s.getCronExpression());
     checkers.put(checker.getId(), checker);
-    String expr = checker.getId() + ".eval()";
-    Condition cond = new Condition(checkers, expr);
+    final String expr = checker.getId() + ".eval()";
+    final Condition cond = new Condition(checkers, expr);
     return cond;
   }
 
   @Override
-  public void insertSchedule(Schedule s) throws ScheduleManagerException {
-    Trigger t = scheduleToTrigger(s);
+  public void insertSchedule(final Schedule s) throws ScheduleManagerException {
+    final Trigger t = scheduleToTrigger(s);
     try {
-      triggerManager.insertTrigger(t, t.getSubmitUser());
+      this.triggerManager.insertTrigger(t, t.getSubmitUser());
       s.setScheduleId(t.getTriggerId());
-    } catch (TriggerManagerException e) {
+    } catch (final TriggerManagerException e) {
       throw new ScheduleManagerException("Failed to insert new schedule!", e);
     }
   }
 
   @Override
-  public void updateSchedule(Schedule s) throws ScheduleManagerException {
-    Trigger t = scheduleToTrigger(s);
+  public void updateSchedule(final Schedule s) throws ScheduleManagerException {
+    final Trigger t = scheduleToTrigger(s);
     try {
-      triggerManager.updateTrigger(t, t.getSubmitUser());
-    } catch (TriggerManagerException e) {
+      this.triggerManager.updateTrigger(t, t.getSubmitUser());
+    } catch (final TriggerManagerException e) {
       throw new ScheduleManagerException("Failed to update schedule!", e);
     }
   }
@@ -136,11 +134,11 @@ public class TriggerBasedScheduleLoader implements ScheduleLoader {
   @Override
   public synchronized List<Schedule> loadSchedules()
       throws ScheduleManagerException {
-    List<Trigger> triggers = triggerManager.getTriggers(triggerSource);
-    List<Schedule> schedules = new ArrayList<Schedule>();
-    for (Trigger t : triggers) {
-      lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
-      Schedule s = triggerToSchedule(t);
+    final List<Trigger> triggers = this.triggerManager.getTriggers(this.triggerSource);
+    final List<Schedule> schedules = new ArrayList<>();
+    for (final Trigger t : triggers) {
+      this.lastUpdateTime = Math.max(this.lastUpdateTime, t.getLastModifyTime());
+      final Schedule s = triggerToSchedule(t);
       schedules.add(s);
       System.out.println("loaded schedule for "
           + s.getProjectName() + " (project_ID: " + s.getProjectId() + ")");
@@ -149,26 +147,26 @@ public class TriggerBasedScheduleLoader implements ScheduleLoader {
 
   }
 
-  private Schedule triggerToSchedule(Trigger t) throws ScheduleManagerException {
-    Condition triggerCond = t.getTriggerCondition();
-    Map<String, ConditionChecker> checkers = triggerCond.getCheckers();
+  private Schedule triggerToSchedule(final Trigger t) throws ScheduleManagerException {
+    final Condition triggerCond = t.getTriggerCondition();
+    final Map<String, ConditionChecker> checkers = triggerCond.getCheckers();
     BasicTimeChecker ck = null;
-    for (ConditionChecker checker : checkers.values()) {
+    for (final ConditionChecker checker : checkers.values()) {
       if (checker.getType().equals(BasicTimeChecker.type)) {
         ck = (BasicTimeChecker) checker;
         break;
       }
     }
-    List<TriggerAction> actions = t.getActions();
+    final List<TriggerAction> actions = t.getActions();
     ExecuteFlowAction act = null;
-    for (TriggerAction action : actions) {
+    for (final TriggerAction action : actions) {
       if (action.getType().equals(ExecuteFlowAction.type)) {
         act = (ExecuteFlowAction) action;
         break;
       }
     }
     if (ck != null && act != null) {
-      Schedule s =
+      final Schedule s =
           new Schedule(t.getTriggerId(), act.getProjectId(),
               act.getProjectName(), act.getFlowName(),
               t.getStatus().toString(), ck.getFirstCheckTime(),
@@ -184,35 +182,35 @@ public class TriggerBasedScheduleLoader implements ScheduleLoader {
   }
 
   @Override
-  public void removeSchedule(Schedule s) throws ScheduleManagerException {
+  public void removeSchedule(final Schedule s) throws ScheduleManagerException {
     try {
-      triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser());
-    } catch (TriggerManagerException e) {
+      this.triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser());
+    } catch (final TriggerManagerException e) {
       throw new ScheduleManagerException(e.getMessage());
     }
 
   }
 
   @Override
-  public void updateNextExecTime(Schedule s) throws ScheduleManagerException {
+  public void updateNextExecTime(final Schedule s) throws ScheduleManagerException {
 
   }
 
   @Override
   public synchronized List<Schedule> loadUpdatedSchedules()
       throws ScheduleManagerException {
-    List<Trigger> triggers;
+    final List<Trigger> triggers;
     try {
       triggers =
-          triggerManager.getTriggerUpdates(triggerSource, lastUpdateTime);
-    } catch (TriggerManagerException e) {
+          this.triggerManager.getTriggerUpdates(this.triggerSource, this.lastUpdateTime);
+    } catch (final TriggerManagerException e) {
       e.printStackTrace();
       throw new ScheduleManagerException(e);
     }
-    List<Schedule> schedules = new ArrayList<Schedule>();
-    for (Trigger t : triggers) {
-      lastUpdateTime = Math.max(lastUpdateTime, t.getLastModifyTime());
-      Schedule s = triggerToSchedule(t);
+    final List<Schedule> schedules = new ArrayList<>();
+    for (final Trigger t : triggers) {
+      this.lastUpdateTime = Math.max(this.lastUpdateTime, t.getLastModifyTime());
+      final Schedule s = triggerToSchedule(t);
       schedules.add(s);
       System.out.println("loaded schedule for "
           + s.getProjectName() + " (project_ID: " + s.getProjectId() + ")");
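
The loader above ties ScheduleManager to the trigger subsystem: each Schedule becomes a Trigger whose condition is a BasicTimeChecker and whose action is an ExecuteFlowAction, all tagged with the loader's trigger source so loadSchedules() only picks up its own triggers. A wiring sketch, illustrative only and assuming a TriggerManager instance is available (for example from the server's injector):

import azkaban.scheduler.ScheduleLoader;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.TriggerBasedScheduleLoader;
import azkaban.trigger.TriggerManager;

// Illustrative only -- shows how the pieces in this diff are composed.
public class SchedulerWiringSketch {
  static ScheduleManager wire(final TriggerManager triggerManager) {
    final ScheduleLoader loader =
        new TriggerBasedScheduleLoader(triggerManager, ScheduleManager.triggerSource);
    return new ScheduleManager(loader);
  }
}
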
diff --git a/azkaban-common/src/main/java/azkaban/server/AbstractServiceServlet.java b/azkaban-common/src/main/java/azkaban/server/AbstractServiceServlet.java
index 63af64a..009fe06 100644
--- a/azkaban-common/src/main/java/azkaban/server/AbstractServiceServlet.java
+++ b/azkaban-common/src/main/java/azkaban/server/AbstractServiceServlet.java
@@ -17,60 +17,58 @@
 package azkaban.server;
 
 import azkaban.Constants;
-
 import java.io.IOException;
 import java.io.OutputStream;
-
 import javax.servlet.ServletConfig;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.codehaus.jackson.map.ObjectMapper;
 
 public class AbstractServiceServlet extends HttpServlet {
 
-  private static final long serialVersionUID = 1L;
   public static final String JSON_MIME_TYPE = "application/json";
-
+  private static final long serialVersionUID = 1L;
   private AzkabanServer application;
 
   @Override
-  public void init(ServletConfig config) throws ServletException {
-    application =
-        (AzkabanServer) config.getServletContext().getAttribute(Constants.AZKABAN_SERVLET_CONTEXT_KEY);
+  public void init(final ServletConfig config) throws ServletException {
+    this.application =
+        (AzkabanServer) config.getServletContext()
+            .getAttribute(Constants.AZKABAN_SERVLET_CONTEXT_KEY);
 
-    if (application == null) {
+    if (this.application == null) {
       throw new IllegalStateException(
           "No batch application is defined in the servlet context!");
     }
   }
 
-  protected void writeJSON(HttpServletResponse resp, Object obj)
+  protected void writeJSON(final HttpServletResponse resp, final Object obj)
       throws IOException {
     resp.setContentType(JSON_MIME_TYPE);
-    ObjectMapper mapper = new ObjectMapper();
-    OutputStream stream = resp.getOutputStream();
+    final ObjectMapper mapper = new ObjectMapper();
+    final OutputStream stream = resp.getOutputStream();
     mapper.writeValue(stream, obj);
   }
 
-  public boolean hasParam(HttpServletRequest request, String param) {
+  public boolean hasParam(final HttpServletRequest request, final String param) {
     return request.getParameter(param) != null;
   }
 
-  public String getParam(HttpServletRequest request, String name)
+  public String getParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = request.getParameter(name);
-    if (p == null)
+    final String p = request.getParameter(name);
+    if (p == null) {
       throw new ServletException("Missing required parameter '" + name + "'.");
-    else
+    } else {
       return p;
+    }
   }
 
-  public String getParam(HttpServletRequest request, String name,
-      String defaultVal) {
-    String p = request.getParameter(name);
+  public String getParam(final HttpServletRequest request, final String name,
+      final String defaultVal) {
+    final String p = request.getParameter(name);
     if (p == null) {
       return defaultVal;
     }
@@ -78,35 +76,36 @@ public class AbstractServiceServlet extends HttpServlet {
     return p;
   }
 
-  public int getIntParam(HttpServletRequest request, String name)
+  public int getIntParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Integer.parseInt(p);
   }
 
-  public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+  public int getIntParam(final HttpServletRequest request, final String name,
+      final int defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getIntParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
     return defaultVal;
   }
 
-  public long getLongParam(HttpServletRequest request, String name)
+  public long getLongParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Long.parseLong(p);
   }
 
-  public long getLongParam(HttpServletRequest request, String name,
-      long defaultVal) {
+  public long getLongParam(final HttpServletRequest request, final String name,
+      final long defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getLongParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
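
A hypothetical subclass (names invented for illustration) showing how the param helpers and writeJSON above are typically used from a request handler:

import azkaban.server.AbstractServiceServlet;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

// Illustrative only -- PingServlet and its parameters are not part of Azkaban.
public class PingServlet extends AbstractServiceServlet {

  private static final long serialVersionUID = 1L;

  @Override
  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
      throws ServletException, IOException {
    final int count = getIntParam(req, "count", 1); // optional parameter, defaults to 1
    final Map<String, Object> body = new HashMap<>();
    body.put("pong", count);
    writeJSON(resp, body); // serialized by Jackson and sent as application/json
  }
}
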
diff --git a/azkaban-common/src/main/java/azkaban/server/AzkabanServer.java b/azkaban-common/src/main/java/azkaban/server/AzkabanServer.java
index adec51b..ad9d1ed 100644
--- a/azkaban-common/src/main/java/azkaban/server/AzkabanServer.java
+++ b/azkaban-common/src/main/java/azkaban/server/AzkabanServer.java
@@ -16,31 +16,30 @@
 
 package azkaban.server;
 
+import static azkaban.Constants.DEFAULT_PORT_NUMBER;
+import static azkaban.Constants.DEFAULT_SSL_PORT_NUMBER;
+
+import azkaban.Constants;
+import azkaban.server.session.SessionCache;
+import azkaban.user.UserManager;
+import azkaban.utils.Props;
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.Arrays;
-
 import joptsimple.OptionParser;
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
-
 import org.apache.log4j.Logger;
 import org.apache.velocity.app.VelocityEngine;
 
-import azkaban.Constants;
-import azkaban.user.UserManager;
-import azkaban.utils.Props;
-import azkaban.server.session.SessionCache;
-
-import static azkaban.Constants.*;
-
 
 public abstract class AzkabanServer {
+
   private static final Logger logger = Logger.getLogger(AzkabanServer.class);
   private static Props azkabanProperties = null;
 
-  public static Props loadProps(String[] args) {
+  public static Props loadProps(final String[] args) {
     azkabanProperties = loadProps(args, new OptionParser());
     return azkabanProperties;
   }
@@ -49,8 +48,8 @@ public abstract class AzkabanServer {
     return azkabanProperties;
   }
 
-  public static Props loadProps(String[] args, OptionParser parser) {
-    OptionSpec<String> configDirectory = parser.acceptsAll(
+  public static Props loadProps(final String[] args, final OptionParser parser) {
+    final OptionSpec<String> configDirectory = parser.acceptsAll(
         Arrays.asList("c", "conf"), "The conf directory for Azkaban.")
         .withRequiredArg()
         .describedAs("conf")
@@ -58,12 +57,12 @@ public abstract class AzkabanServer {
 
     // Grabbing the azkaban settings from the conf directory.
     Props azkabanSettings = null;
-    OptionSet options = parser.parse(args);
+    final OptionSet options = parser.parse(args);
 
     if (options.has(configDirectory)) {
-      String path = options.valueOf(configDirectory);
+      final String path = options.valueOf(configDirectory);
       logger.info("Loading azkaban settings file from " + path);
-      File dir = new File(path);
+      final File dir = new File(path);
       if (!dir.exists()) {
         logger.error("Conf directory " + path + " doesn't exist.");
       } else if (!dir.isDirectory()) {
@@ -83,7 +82,7 @@ public abstract class AzkabanServer {
     return azkabanSettings;
   }
 
-  private static void updateDerivedConfigs(Props azkabanSettings) {
+  private static void updateDerivedConfigs(final Props azkabanSettings) {
     final boolean isSslEnabled = azkabanSettings.getBoolean("jetty.use.ssl", true);
     final int port = isSslEnabled
         ? azkabanSettings.getInt("jetty.ssl.port", DEFAULT_SSL_PORT_NUMBER)
@@ -96,9 +95,9 @@ public abstract class AzkabanServer {
     azkabanSettings.put("server.useSSL", String.valueOf(isSslEnabled));
   }
 
-  public static Props loadAzkabanConfigurationFromDirectory(File dir) {
-    File azkabanPrivatePropsFile = new File(dir, Constants.AZKABAN_PRIVATE_PROPERTIES_FILE);
-    File azkabanPropsFile = new File(dir, Constants.AZKABAN_PROPERTIES_FILE);
+  public static Props loadAzkabanConfigurationFromDirectory(final File dir) {
+    final File azkabanPrivatePropsFile = new File(dir, Constants.AZKABAN_PRIVATE_PROPERTIES_FILE);
+    final File azkabanPropsFile = new File(dir, Constants.AZKABAN_PROPERTIES_FILE);
 
     Props props = null;
     try {
@@ -112,9 +111,9 @@ public abstract class AzkabanServer {
         logger.info("Loading azkaban properties file");
         props = new Props(props, azkabanPropsFile);
       }
-    } catch (FileNotFoundException e) {
+    } catch (final FileNotFoundException e) {
       logger.error("File not found. Could not load azkaban config file", e);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       logger.error("File found, but error reading. Could not load azkaban config file", e);
     }
     return props;
@@ -126,7 +125,7 @@ public abstract class AzkabanServer {
    * @return Props instance
    */
   private static Props loadConfigurationFromAzkabanHome() {
-    String azkabanHome = System.getenv("AZKABAN_HOME");
+    final String azkabanHome = System.getenv("AZKABAN_HOME");
 
     if (azkabanHome == null) {
       logger.error("AZKABAN_HOME not set. Will try default.");
@@ -137,7 +136,7 @@ public abstract class AzkabanServer {
       return null;
     }
 
-    File confPath = new File(azkabanHome, Constants.DEFAULT_CONF_PATH);
+    final File confPath = new File(azkabanHome, Constants.DEFAULT_CONF_PATH);
     if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
       logger.error(azkabanHome + " does not contain a readable conf directory.");
       return null;
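
For context, loadProps above reads the configuration directory given with -c/--conf (the files named by Constants.AZKABAN_PRIVATE_PROPERTIES_FILE and Constants.AZKABAN_PROPERTIES_FILE, with the latter layered on top) and falls back to $AZKABAN_HOME/conf when no option is passed. An illustrative call, with a placeholder path:

import azkaban.server.AzkabanServer;
import azkaban.utils.Props;

// Illustrative only -- the conf path is a placeholder.
public class LoadPropsDemo {
  public static void main(final String[] args) {
    final Props props = AzkabanServer.loadProps(new String[]{"-c", "/path/to/conf"});
    if (props == null) {
      System.out.println("No Azkaban configuration found.");
    } else {
      System.out.println("Loaded configuration: " + props);
    }
  }
}
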
diff --git a/azkaban-common/src/main/java/azkaban/server/HttpRequestUtils.java b/azkaban-common/src/main/java/azkaban/server/HttpRequestUtils.java
index 678a1f9..83f51e7 100644
--- a/azkaban-common/src/main/java/azkaban/server/HttpRequestUtils.java
+++ b/azkaban-common/src/main/java/azkaban/server/HttpRequestUtils.java
@@ -16,17 +16,6 @@
 
 package azkaban.server;
 
-import java.util.Arrays;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServletRequest;
-
-import org.apache.commons.lang.StringUtils;
-
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutionOptions.FailureAction;
 import azkaban.executor.ExecutorManagerException;
@@ -37,14 +26,23 @@ import azkaban.user.Role;
 import azkaban.user.User;
 import azkaban.user.UserManager;
 import azkaban.utils.JSONUtils;
+import java.util.Arrays;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import org.apache.commons.lang.StringUtils;
 
 public class HttpRequestUtils {
-  public static ExecutionOptions parseFlowOptions(HttpServletRequest req)
+
+  public static ExecutionOptions parseFlowOptions(final HttpServletRequest req)
       throws ServletException {
-    ExecutionOptions execOptions = new ExecutionOptions();
+    final ExecutionOptions execOptions = new ExecutionOptions();
 
     if (hasParam(req, "failureAction")) {
-      String option = getParam(req, "failureAction");
+      final String option = getParam(req, "failureAction");
       if (option.equals("finishCurrent")) {
         execOptions.setFailureAction(FailureAction.FINISH_CURRENTLY_RUNNING);
       } else if (option.equals("cancelImmediately")) {
@@ -55,25 +53,25 @@ public class HttpRequestUtils {
     }
 
     if (hasParam(req, "failureEmailsOverride")) {
-      boolean override = getBooleanParam(req, "failureEmailsOverride", false);
+      final boolean override = getBooleanParam(req, "failureEmailsOverride", false);
       execOptions.setFailureEmailsOverridden(override);
     }
     if (hasParam(req, "successEmailsOverride")) {
-      boolean override = getBooleanParam(req, "successEmailsOverride", false);
+      final boolean override = getBooleanParam(req, "successEmailsOverride", false);
       execOptions.setSuccessEmailsOverridden(override);
     }
 
     if (hasParam(req, "failureEmails")) {
-      String emails = getParam(req, "failureEmails");
+      final String emails = getParam(req, "failureEmails");
       if (!emails.isEmpty()) {
-        String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+        final String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
         execOptions.setFailureEmails(Arrays.asList(emailSplit));
       }
     }
     if (hasParam(req, "successEmails")) {
-      String emails = getParam(req, "successEmails");
+      final String emails = getParam(req, "successEmails");
       if (!emails.isEmpty()) {
-        String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
+        final String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+");
         execOptions.setSuccessEmails(Arrays.asList(emailSplit));
       }
     }
@@ -91,11 +89,11 @@ public class HttpRequestUtils {
       concurrentOption = getParam(req, "concurrentOption");
       execOptions.setConcurrentOption(concurrentOption);
       if (concurrentOption.equals("pipeline")) {
-        int pipelineLevel = getIntParam(req, "pipelineLevel");
+        final int pipelineLevel = getIntParam(req, "pipelineLevel");
         execOptions.setPipelineLevel(pipelineLevel);
       } else if (concurrentOption.equals("queue")) {
         // Not yet implemented
-        int queueLevel = getIntParam(req, "queueLevel", 1);
+        final int queueLevel = getIntParam(req, "queueLevel", 1);
         execOptions.setPipelineLevel(queueLevel);
       }
     }
@@ -105,14 +103,13 @@ public class HttpRequestUtils {
       execOptions.setMailCreator(mailCreator);
     }
 
-    Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride");
+    final Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride");
     execOptions.addAllFlowParameters(flowParamGroup);
 
     if (hasParam(req, "disabled")) {
-      String disabled = getParam(req, "disabled");
+      final String disabled = getParam(req, "disabled");
       if (!disabled.isEmpty()) {
-        @SuppressWarnings("unchecked")
-        List<Object> disabledList =
+        final List<Object> disabledList =
             (List<Object>) JSONUtils.parseJSONFromStringQuiet(disabled);
         execOptions.setDisabledJobs(disabledList);
       }
@@ -130,12 +127,13 @@ public class HttpRequestUtils {
    * @param user
    * </pre>
    */
-  public static void filterAdminOnlyFlowParams(UserManager userManager,
-    ExecutionOptions options, User user)  throws ExecutorManagerException {
-    if (options == null || options.getFlowParameters() == null)
+  public static void filterAdminOnlyFlowParams(final UserManager userManager,
+      final ExecutionOptions options, final User user) throws ExecutorManagerException {
+    if (options == null || options.getFlowParameters() == null) {
       return;
+    }
 
-    Map<String, String> params = options.getFlowParameters();
+    final Map<String, String> params = options.getFlowParameters();
     // is azkaban Admin
     if (!hasPermission(userManager, user, Type.ADMIN)) {
       params.remove(ExecutionOptions.FLOW_PRIORITY);
@@ -149,14 +147,13 @@ public class HttpRequestUtils {
   /**
    * parse a string as number and throws exception if parsed value is not a
    * valid integer
-   * @param params
-   * @param paramName
+   *
    * @throws ExecutorManagerException if paramName is not a valid integer
    */
-  public static boolean validateIntegerParam(Map<String, String> params,
-    String paramName) throws ExecutorManagerException {
+  public static boolean validateIntegerParam(final Map<String, String> params,
+      final String paramName) throws ExecutorManagerException {
     if (params != null && params.containsKey(paramName)
-      && !StringUtils.isNumeric(params.get(paramName))) {
+        && !StringUtils.isNumeric(params.get(paramName))) {
       throw new ExecutorManagerException(paramName + " should be an integer");
     }
     return true;
@@ -164,18 +161,13 @@ public class HttpRequestUtils {
 
   /**
    * returns true if user has access of type
-   *
-   * @param userManager
-   * @param user
-   * @param type
-   * @return
    */
-  public static boolean hasPermission(UserManager userManager, User user,
-    Permission.Type type) {
-    for (String roleName : user.getRoles()) {
-      Role role = userManager.getRole(roleName);
+  public static boolean hasPermission(final UserManager userManager, final User user,
+      final Permission.Type type) {
+    for (final String roleName : user.getRoles()) {
+      final Role role = userManager.getRole(roleName);
       if (role.getPermission().isPermissionSet(type)
-        || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
+          || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
         return true;
       }
     }
@@ -184,27 +176,18 @@ public class HttpRequestUtils {
 
   /**
    * Checks for the existance of the parameter in the request
-   *
-   * @param request
-   * @param param
-   * @return
    */
-  public static boolean hasParam(HttpServletRequest request, String param) {
+  public static boolean hasParam(final HttpServletRequest request, final String param) {
     return request.getParameter(param) != null;
   }
 
   /**
    * Retrieves the param from the http servlet request. Will throw an exception
    * if not found
-   *
-   * @param request
-   * @param name
-   * @return
-   * @throws ServletException
    */
-  public static String getParam(HttpServletRequest request, String name)
+  public static String getParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = request.getParameter(name);
+    final String p = request.getParameter(name);
     if (p == null) {
       throw new ServletException("Missing required parameter '" + name + "'.");
     } else {
@@ -214,16 +197,10 @@ public class HttpRequestUtils {
 
   /**
    * Retrieves the param from the http servlet request.
-   *
-   * @param request
-   * @param name
-   * @param default
-   *
-   * @return
    */
-  public static String getParam(HttpServletRequest request, String name,
-      String defaultVal) {
-    String p = request.getParameter(name);
+  public static String getParam(final HttpServletRequest request, final String name,
+      final String defaultVal) {
+    final String p = request.getParameter(name);
     if (p == null) {
       return defaultVal;
     }
@@ -233,24 +210,19 @@ public class HttpRequestUtils {
   /**
    * Returns the param and parses it into an int. Will throw an exception if not
    * found, or a parse error if the type is incorrect.
-   *
-   * @param request
-   * @param name
-   * @return
-   * @throws ServletException
    */
-  public static int getIntParam(HttpServletRequest request, String name)
+  public static int getIntParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Integer.parseInt(p);
   }
 
-  public static int getIntParam(HttpServletRequest request, String name,
-      int defaultVal) {
+  public static int getIntParam(final HttpServletRequest request, final String name,
+      final int defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getIntParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
@@ -258,18 +230,18 @@ public class HttpRequestUtils {
     return defaultVal;
   }
 
-  public static boolean getBooleanParam(HttpServletRequest request, String name)
+  public static boolean getBooleanParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Boolean.parseBoolean(p);
   }
 
-  public static boolean getBooleanParam(HttpServletRequest request,
-      String name, boolean defaultVal) {
+  public static boolean getBooleanParam(final HttpServletRequest request,
+      final String name, final boolean defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getBooleanParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
@@ -277,18 +249,18 @@ public class HttpRequestUtils {
     return defaultVal;
   }
 
-  public static long getLongParam(HttpServletRequest request, String name)
+  public static long getLongParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Long.valueOf(p);
   }
 
-  public static long getLongParam(HttpServletRequest request, String name,
-      long defaultVal) {
+  public static long getLongParam(final HttpServletRequest request, final String name,
+      final long defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getLongParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
@@ -296,15 +268,14 @@ public class HttpRequestUtils {
     return defaultVal;
   }
 
-  public static Map<String, String> getParamGroup(HttpServletRequest request,
-      String groupName) throws ServletException {
-    @SuppressWarnings("unchecked")
-    Enumeration<String> enumerate = request.getParameterNames();
-    String matchString = groupName + "[";
+  public static Map<String, String> getParamGroup(final HttpServletRequest request,
+      final String groupName) throws ServletException {
+    final Enumeration<String> enumerate = request.getParameterNames();
+    final String matchString = groupName + "[";
 
-    HashMap<String, String> groupParam = new HashMap<String, String>();
+    final HashMap<String, String> groupParam = new HashMap<>();
     while (enumerate.hasMoreElements()) {
-      String str = (String) enumerate.nextElement();
+      final String str = (String) enumerate.nextElement();
       if (str.startsWith(matchString)) {
         groupParam.put(str.substring(matchString.length(), str.length() - 1),
             request.getParameter(str));
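
For context, a minimal sketch of how these parameter helpers are typically called from a servlet. The servlet, the parameter names, and the azkaban.server package for HttpRequestUtils are illustrative assumptions, not part of this change.

import azkaban.server.HttpRequestUtils;   // package assumed
import java.io.IOException;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class ExampleServlet extends HttpServlet {

  @Override
  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
      throws ServletException, IOException {
    // Required parameter: throws ServletException when missing.
    final String project = HttpRequestUtils.getParam(req, "project");

    // Optional parameters fall back to the supplied defaults.
    final int length = HttpRequestUtils.getIntParam(req, "length", 100);
    final boolean verbose = HttpRequestUtils.getBooleanParam(req, "verbose", false);

    // Parameters named "settings[foo]", "settings[bar]", ... are collected into
    // a map keyed by the text between the brackets.
    final Map<String, String> settings = HttpRequestUtils.getParamGroup(req, "settings");

    resp.getWriter().printf("%s %d %b %s%n", project, length, verbose, settings);
  }
}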
diff --git a/azkaban-common/src/main/java/azkaban/server/session/Session.java b/azkaban-common/src/main/java/azkaban/server/session/Session.java
index dd8ce9d..8d2a226 100644
--- a/azkaban-common/src/main/java/azkaban/server/session/Session.java
+++ b/azkaban-common/src/main/java/azkaban/server/session/Session.java
@@ -15,27 +15,24 @@
  */
 package azkaban.server.session;
 
+import azkaban.user.User;
 import java.util.HashMap;
 import java.util.Map;
 
-import azkaban.user.User;
-
 /**
  * Container for the session, mapping session id to user in map
  */
 public class Session {
+
   private final User user;
   private final String sessionId;
   private final String ip;
-  private Map<String, Object> sessionData = new HashMap<String, Object>();
+  private final Map<String, Object> sessionData = new HashMap<>();
 
   /**
    * Constructor for the session
-   *
-   * @param sessionId
-   * @param user
    */
-  public Session(String sessionId, User user, String ip) {
+  public Session(final String sessionId, final User user, final String ip) {
     this.user = user;
     this.sessionId = sessionId;
     this.ip = ip;
@@ -43,31 +40,27 @@ public class Session {
 
   /**
    * Returns the User object
-   *
-   * @return
    */
   public User getUser() {
-    return user;
+    return this.user;
   }
 
   /**
    * Returns the sessionId
-   *
-   * @return
    */
   public String getSessionId() {
-    return sessionId;
+    return this.sessionId;
   }
 
   public String getIp() {
-    return ip;
+    return this.ip;
   }
 
-  public void setSessionData(String key, Object value) {
-    sessionData.put(key, value);
+  public void setSessionData(final String key, final Object value) {
+    this.sessionData.put(key, value);
   }
 
-  public Object getSessionData(String key) {
-    return sessionData.get(key);
+  public Object getSessionData(final String key) {
+    return this.sessionData.get(key);
   }
 }
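
A small illustrative sketch of the Session container above; the user id, IP and data key are made up, and User is assumed to expose a single-argument constructor.

import azkaban.server.session.Session;
import azkaban.user.User;
import java.util.UUID;

public class SessionExample {

  public static void main(final String[] args) {
    final User user = new User("alice");  // single-argument constructor assumed
    final Session session = new Session(UUID.randomUUID().toString(), user, "127.0.0.1");

    // Arbitrary per-session data can be attached and read back by key.
    session.setSessionData("csrf.token", UUID.randomUUID().toString());
    System.out.println(session.getSessionId() + " from " + session.getIp()
        + " token=" + session.getSessionData("csrf.token"));
  }
}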
diff --git a/azkaban-common/src/main/java/azkaban/server/session/SessionCache.java b/azkaban-common/src/main/java/azkaban/server/session/SessionCache.java
index 20f4496..7351f7d 100644
--- a/azkaban-common/src/main/java/azkaban/server/session/SessionCache.java
+++ b/azkaban-common/src/main/java/azkaban/server/session/SessionCache.java
@@ -16,13 +16,11 @@
 
 package azkaban.server.session;
 
-import com.google.common.cache.CacheBuilder;
+import azkaban.utils.Props;
 import com.google.common.cache.Cache;
-
+import com.google.common.cache.CacheBuilder;
 import java.util.concurrent.TimeUnit;
 
-import azkaban.utils.Props;
-
 /**
  * Cache for web session.
  *
@@ -32,19 +30,18 @@ import azkaban.utils.Props;
  * set to 1 day.
  */
 public class SessionCache {
+
   private static final int MAX_NUM_SESSIONS = 10000;
   private static final long SESSION_TIME_TO_LIVE = 24 * 60 * 60 * 1000L;
 
   // private CacheManager manager = CacheManager.create();
-  private Cache<String, Session> cache;
+  private final Cache<String, Session> cache;
 
   /**
    * Constructor taking global props.
-   *
-   * @param props
    */
-  public SessionCache(Props props) {
-    cache = CacheBuilder.newBuilder()
+  public SessionCache(final Props props) {
+    this.cache = CacheBuilder.newBuilder()
         .maximumSize(props.getInt("max.num.sessions", MAX_NUM_SESSIONS))
         .expireAfterAccess(
             props.getLong("session.time.to.live", SESSION_TIME_TO_LIVE),
@@ -54,32 +51,23 @@ public class SessionCache {
 
   /**
    * Returns the cached session using the session id.
-   *
-   * @param sessionId
-   * @return
    */
-  public Session getSession(String sessionId) {
-    Session elem = cache.getIfPresent(sessionId);
+  public Session getSession(final String sessionId) {
+    final Session elem = this.cache.getIfPresent(sessionId);
     return elem;
   }
 
   /**
    * Adds a session to the cache. Accessible through the session ID.
-   *
-   * @param id
-   * @param session
    */
-  public void addSession(Session session) {
-    cache.put(session.getSessionId(), session);
+  public void addSession(final Session session) {
+    this.cache.put(session.getSessionId(), session);
   }
 
   /**
    * Removes the session from the cache.
-   *
-   * @param id
-   * @return
    */
-  public void removeSession(String id) {
-    cache.invalidate(id);
+  public void removeSession(final String id) {
+    this.cache.invalidate(id);
   }
 }
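
An illustrative wiring of the cache above, feeding the two tuning properties it reads; the property values and session contents are placeholders.

import azkaban.server.session.Session;
import azkaban.server.session.SessionCache;
import azkaban.user.User;
import azkaban.utils.Props;

public class SessionCacheExample {

  public static void main(final String[] args) {
    final Props props = new Props();
    // Both settings are optional; the defaults are 10000 sessions and a 24h TTL.
    props.put("max.num.sessions", "5000");
    props.put("session.time.to.live", String.valueOf(12 * 60 * 60 * 1000L));

    final SessionCache cache = new SessionCache(props);
    final Session session = new Session("session-id-1", new User("alice"), "10.0.0.1");

    cache.addSession(session);   // keyed by session.getSessionId()
    System.out.println(cache.getSession("session-id-1") != null);  // true until evicted
    cache.removeSession("session-id-1");
  }
}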
diff --git a/azkaban-common/src/main/java/azkaban/ServiceProvider.java b/azkaban-common/src/main/java/azkaban/ServiceProvider.java
index 9a67e05..bf14c89 100644
--- a/azkaban-common/src/main/java/azkaban/ServiceProvider.java
+++ b/azkaban-common/src/main/java/azkaban/ServiceProvider.java
@@ -17,18 +17,18 @@
 
 package azkaban;
 
-import com.google.inject.Injector;
+import static com.google.common.base.Preconditions.checkState;
+import static java.util.Objects.requireNonNull;
 
-import static com.google.common.base.Preconditions.*;
-import static java.util.Objects.*;
+import com.google.inject.Injector;
 
 
 /**
- * The {@link ServiceProvider} class is an interface to fetch any external dependency. Under the hood it simply
- * maintains a Guice {@link Injector} which is used to fetch the required service type. The current direction of
- * utilization of Guice is to gradually move classes into the Guice scope so that Guice can automatically resolve
- * dependencies and provide the required services directly.
- *
+ * The {@link ServiceProvider} class is an interface to fetch any external dependency. Under the
+ * hood it simply maintains a Guice {@link Injector} which is used to fetch the required service
+ * type. The current direction of utilization of Guice is to gradually move classes into the Guice
+ * scope so that Guice can automatically resolve dependencies and provide the required services
+ * directly.
  */
 public enum ServiceProvider {
   SERVICE_PROVIDER;
@@ -37,9 +37,10 @@ public enum ServiceProvider {
 
   /**
    * Ensure that injector is set only once!
+   *
    * @param injector Guice injector is itself used for providing services.
    */
-  public synchronized void setInjector(Injector injector) {
+  public synchronized void setInjector(final Injector injector) {
     checkState(this.injector == null, "Injector is already set");
     this.injector = requireNonNull(injector, "arg injector is null");
   }
@@ -48,8 +49,8 @@ public enum ServiceProvider {
     this.injector = null;
   }
 
-  public <T> T getInstance(Class<T> clazz) {
-    return requireNonNull(injector).getInstance(clazz);
+  public <T> T getInstance(final Class<T> clazz) {
+    return requireNonNull(this.injector).getInstance(clazz);
   }
 
 }
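
A sketch of the intended usage described in the class comment: install a Guice injector once, then resolve services through the enum singleton. The Greeter service and its module are placeholders, not part of this change.

import azkaban.ServiceProvider;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class ServiceProviderExample {

  // Placeholder service bound by the placeholder module below.
  public interface Greeter {
    String greet(String name);
  }

  public static class SimpleGreeter implements Greeter {
    @Override
    public String greet(final String name) {
      return "Hello, " + name;
    }
  }

  public static void main(final String[] args) {
    final Injector injector = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(Greeter.class).to(SimpleGreeter.class);
      }
    });

    // setInjector may be called only once; a second call fails the checkState.
    ServiceProvider.SERVICE_PROVIDER.setInjector(injector);

    final Greeter greeter = ServiceProvider.SERVICE_PROVIDER.getInstance(Greeter.class);
    System.out.println(greeter.greet("azkaban"));
  }
}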
diff --git a/azkaban-common/src/main/java/azkaban/sla/SlaOption.java b/azkaban-common/src/main/java/azkaban/sla/SlaOption.java
index 4683b51..526b90f 100644
--- a/azkaban-common/src/main/java/azkaban/sla/SlaOption.java
+++ b/azkaban-common/src/main/java/azkaban/sla/SlaOption.java
@@ -16,22 +16,19 @@
 
 package azkaban.sla;
 
-import com.google.common.collect.ImmutableSet;
+import azkaban.executor.ExecutableFlow;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-
 import java.util.Set;
 import java.util.stream.Collectors;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;
 import org.joda.time.format.DateTimeFormatter;
 
-import azkaban.executor.ExecutableFlow;
-
 public class SlaOption {
 
   public static final String TYPE_FLOW_FINISH = "FlowFinish";
@@ -49,94 +46,145 @@ public class SlaOption {
   public static final String ALERT_TYPE = "SlaAlertType";
   public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow";
   public static final String ACTION_ALERT = "SlaAlert";
-
+  private static final DateTimeFormatter fmt = DateTimeFormat
+      .forPattern("MM/dd, YYYY HH:mm");
   private String type;
   private Map<String, Object> info;
   private List<String> actions;
 
-  private static DateTimeFormatter fmt = DateTimeFormat
-      .forPattern("MM/dd, YYYY HH:mm");
-
-  public SlaOption(String type, List<String> actions, Map<String, Object> info) {
+  public SlaOption(final String type, final List<String> actions, final Map<String, Object> info) {
     this.type = type;
     this.info = info;
     this.actions = actions;
   }
 
-  public static List<SlaOption> getJobLevelSLAOptions(ExecutableFlow flow) {
-    Set<String> jobLevelSLAs = new HashSet<>(Arrays.asList(SlaOption.TYPE_JOB_FINISH, SlaOption.TYPE_JOB_SUCCEED));
-    return flow.getSlaOptions().stream().filter(slaOption -> jobLevelSLAs.contains(slaOption.getType()))
+  public static List<SlaOption> getJobLevelSLAOptions(final ExecutableFlow flow) {
+    final Set<String> jobLevelSLAs = new HashSet<>(
+        Arrays.asList(SlaOption.TYPE_JOB_FINISH, SlaOption.TYPE_JOB_SUCCEED));
+    return flow.getSlaOptions().stream()
+        .filter(slaOption -> jobLevelSLAs.contains(slaOption.getType()))
         .collect(Collectors.toList());
   }
 
-  public static List<SlaOption> getFlowLevelSLAOptions(ExecutableFlow flow) {
-    Set<String> flowLevelSLAs = new HashSet<>(Arrays.asList(SlaOption.TYPE_FLOW_FINISH, SlaOption.TYPE_FLOW_SUCCEED));
-    return flow.getSlaOptions().stream().filter(slaOption -> flowLevelSLAs.contains(slaOption.getType()))
+  public static List<SlaOption> getFlowLevelSLAOptions(final ExecutableFlow flow) {
+    final Set<String> flowLevelSLAs = new HashSet<>(
+        Arrays.asList(SlaOption.TYPE_FLOW_FINISH, SlaOption.TYPE_FLOW_SUCCEED));
+    return flow.getSlaOptions().stream()
+        .filter(slaOption -> flowLevelSLAs.contains(slaOption.getType()))
         .collect(Collectors.toList());
   }
 
+  public static SlaOption fromObject(final Object object) {
+
+    final HashMap<String, Object> slaObj = (HashMap<String, Object>) object;
+
+    final String type = (String) slaObj.get("type");
+    final List<String> actions = (List<String>) slaObj.get("actions");
+    final Map<String, Object> info = (Map<String, Object>) slaObj.get("info");
+
+    return new SlaOption(type, actions, info);
+  }
+
+  public static String createSlaMessage(final SlaOption slaOption, final ExecutableFlow flow) {
+    final String type = slaOption.getType();
+    final int execId = flow.getExecutionId();
+    if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
+      final String flowName =
+          (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+      final String duration =
+          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+      final String basicinfo =
+          "SLA Alert: Your flow " + flowName + " failed to FINISH within "
+              + duration + "</br>";
+      final String expected =
+          "Here is details : </br>" + "Flow " + flowName + " in execution "
+              + execId + " is expected to FINISH within " + duration + " from "
+              + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+      final String actual = "Actual flow status is " + flow.getStatus();
+      return basicinfo + expected + actual;
+    } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
+      final String flowName =
+          (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
+      final String duration =
+          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+      final String basicinfo =
+          "SLA Alert: Your flow " + flowName + " failed to SUCCEED within "
+              + duration + "</br>";
+      final String expected =
+          "Here is details : </br>" + "Flow " + flowName + " in execution "
+              + execId + " expected to FINISH within " + duration + " from "
+              + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
+      final String actual = "Actual flow status is " + flow.getStatus();
+      return basicinfo + expected + actual;
+    } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
+      final String jobName =
+          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final String duration =
+          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+      return "SLA Alert: Your job " + jobName + " failed to FINISH within "
+          + duration + " in execution " + execId;
+    } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
+      final String jobName =
+          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final String duration =
+          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
+      return "SLA Alert: Your job " + jobName + " failed to SUCCEED within "
+          + duration + " in execution " + execId;
+    } else {
+      return "Unrecognized SLA type " + type;
+    }
+  }
+
   public String getType() {
-    return type;
+    return this.type;
   }
 
-  public void setType(String type) {
+  public void setType(final String type) {
     this.type = type;
   }
 
   public Map<String, Object> getInfo() {
-    return info;
+    return this.info;
   }
 
-  public void setInfo(Map<String, Object> info) {
+  public void setInfo(final Map<String, Object> info) {
     this.info = info;
   }
 
   public List<String> getActions() {
-    return actions;
+    return this.actions;
   }
 
-  public void setActions(List<String> actions) {
+  public void setActions(final List<String> actions) {
     this.actions = actions;
   }
 
   public Map<String, Object> toObject() {
-    HashMap<String, Object> slaObj = new HashMap<String, Object>();
+    final HashMap<String, Object> slaObj = new HashMap<>();
 
-    slaObj.put("type", type);
-    slaObj.put("info", info);
-    slaObj.put("actions", actions);
+    slaObj.put("type", this.type);
+    slaObj.put("info", this.info);
+    slaObj.put("actions", this.actions);
 
     return slaObj;
   }
 
-  @SuppressWarnings("unchecked")
-  public static SlaOption fromObject(Object object) {
-
-    HashMap<String, Object> slaObj = (HashMap<String, Object>) object;
-
-    String type = (String) slaObj.get("type");
-    List<String> actions = (List<String>) slaObj.get("actions");
-    Map<String, Object> info = (Map<String, Object>) slaObj.get("info");
-
-    return new SlaOption(type, actions, info);
-  }
-
   public Object toWebObject() {
-    HashMap<String, Object> slaObj = new HashMap<String, Object>();
+    final HashMap<String, Object> slaObj = new HashMap<>();
 
-    if (type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_FLOW_SUCCEED)) {
+    if (this.type.equals(TYPE_FLOW_FINISH) || this.type.equals(TYPE_FLOW_SUCCEED)) {
       slaObj.put("id", "");
     } else {
-      slaObj.put("id", info.get(INFO_JOB_NAME));
+      slaObj.put("id", this.info.get(INFO_JOB_NAME));
     }
-    slaObj.put("duration", info.get(INFO_DURATION));
-    if (type.equals(TYPE_FLOW_FINISH) || type.equals(TYPE_JOB_FINISH)) {
+    slaObj.put("duration", this.info.get(INFO_DURATION));
+    if (this.type.equals(TYPE_FLOW_FINISH) || this.type.equals(TYPE_JOB_FINISH)) {
       slaObj.put("rule", "FINISH");
     } else {
       slaObj.put("rule", "SUCCESS");
     }
-    List<String> actionsObj = new ArrayList<String>();
-    for (String act : actions) {
+    final List<String> actionsObj = new ArrayList<>();
+    for (final String act : this.actions) {
       if (act.equals(ACTION_ALERT)) {
         actionsObj.add("EMAIL");
       } else {
@@ -153,54 +201,4 @@ public class SlaOption {
     return "Sla of " + getType() + getInfo() + getActions();
   }
 
-  public static String createSlaMessage(SlaOption slaOption, ExecutableFlow flow) {
-    String type = slaOption.getType();
-    int execId = flow.getExecutionId();
-    if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
-      String flowName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
-      String duration =
-          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-      String basicinfo =
-          "SLA Alert: Your flow " + flowName + " failed to FINISH within "
-              + duration + "</br>";
-      String expected =
-          "Here is details : </br>" + "Flow " + flowName + " in execution "
-              + execId + " is expected to FINISH within " + duration + " from "
-              + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
-      String actual = "Actual flow status is " + flow.getStatus();
-      return basicinfo + expected + actual;
-    } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
-      String flowName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_FLOW_NAME);
-      String duration =
-          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-      String basicinfo =
-          "SLA Alert: Your flow " + flowName + " failed to SUCCEED within "
-              + duration + "</br>";
-      String expected =
-          "Here is details : </br>" + "Flow " + flowName + " in execution "
-              + execId + " expected to FINISH within " + duration + " from "
-              + fmt.print(new DateTime(flow.getStartTime())) + "</br>";
-      String actual = "Actual flow status is " + flow.getStatus();
-      return basicinfo + expected + actual;
-    } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      String duration =
-          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-      return "SLA Alert: Your job " + jobName + " failed to FINISH within "
-          + duration + " in execution " + execId;
-    } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      String duration =
-          (String) slaOption.getInfo().get(SlaOption.INFO_DURATION);
-      return "SLA Alert: Your job " + jobName + " failed to SUCCEED within "
-          + duration + " in execution " + execId;
-    } else {
-      return "Unrecognized SLA type " + type;
-    }
-  }
-
 }
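
A sketch of building an SlaOption and round-tripping it through toObject()/fromObject(), using only constants and methods visible above; the flow name and duration values are made up.

import azkaban.sla.SlaOption;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SlaOptionExample {

  public static void main(final String[] args) {
    final Map<String, Object> info = new HashMap<>();
    info.put(SlaOption.INFO_FLOW_NAME, "daily-etl");
    info.put(SlaOption.INFO_DURATION, "30m");

    final List<String> actions =
        Arrays.asList(SlaOption.ACTION_ALERT, SlaOption.ACTION_CANCEL_FLOW);

    final SlaOption option = new SlaOption(SlaOption.TYPE_FLOW_FINISH, actions, info);

    // toObject()/fromObject() serialize the option to a plain map and back.
    final Map<String, Object> serialized = option.toObject();
    final SlaOption restored = SlaOption.fromObject(serialized);
    System.out.println(restored.getType() + " -> " + restored.getActions());
  }
}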
diff --git a/azkaban-common/src/main/java/azkaban/storage/DatabaseStorage.java b/azkaban-common/src/main/java/azkaban/storage/DatabaseStorage.java
index 3d71771..6622da0 100644
--- a/azkaban-common/src/main/java/azkaban/storage/DatabaseStorage.java
+++ b/azkaban-common/src/main/java/azkaban/storage/DatabaseStorage.java
@@ -29,29 +29,31 @@ import javax.inject.Inject;
 /**
  * DatabaseStorage
  *
- * This class helps in storing projects in the DB itself. This is intended to be the default since it is the current
- * behavior of Azkaban.
+ * This class helps in storing projects in the DB itself. This is intended to be the default since
+ * it is the current behavior of Azkaban.
  */
 public class DatabaseStorage implements Storage {
+
   private final ProjectLoader projectLoader;
 
   @Inject
-  public DatabaseStorage(ProjectLoader projectLoader) {
+  public DatabaseStorage(final ProjectLoader projectLoader) {
     this.projectLoader = projectLoader;
   }
 
   @Override
-  public InputStream get(String key) {
-    throw new UnsupportedOperationException("Not implemented yet. Use get(projectId, version) instead");
+  public InputStream get(final String key) {
+    throw new UnsupportedOperationException(
+        "Not implemented yet. Use get(projectId, version) instead");
   }
 
-  public ProjectFileHandler get(int projectId, int version) {
-    return projectLoader.getUploadedFile(projectId, version);
+  public ProjectFileHandler get(final int projectId, final int version) {
+    return this.projectLoader.getUploadedFile(projectId, version);
   }
 
   @Override
-  public String put(StorageMetadata metadata, File localFile) {
-    projectLoader.uploadProjectFile(
+  public String put(final StorageMetadata metadata, final File localFile) {
+    this.projectLoader.uploadProjectFile(
         metadata.getProjectId(),
         metadata.getVersion(),
         localFile, metadata.getUploader());
@@ -60,7 +62,7 @@ public class DatabaseStorage implements Storage {
   }
 
   @Override
-  public boolean delete(String key) {
+  public boolean delete(final String key) {
     throw new UnsupportedOperationException("Delete is not supported");
   }
 }
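
A minimal sketch of the contract above: the String-keyed get/delete methods throw UnsupportedOperationException, and callers go through the (projectId, version) getter instead. Obtaining the instance through injection is an assumption for illustration.

import azkaban.project.ProjectFileHandler;
import azkaban.storage.DatabaseStorage;
import javax.inject.Inject;

public class DatabaseStorageClient {

  private final DatabaseStorage storage;

  @Inject
  public DatabaseStorageClient(final DatabaseStorage storage) {
    this.storage = storage;
  }

  public ProjectFileHandler fetch(final int projectId, final int version) {
    // storage.get("some-key") and storage.delete("some-key") would throw
    // UnsupportedOperationException; only this form is implemented here.
    return this.storage.get(projectId, version);
  }
}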
diff --git a/azkaban-common/src/main/java/azkaban/storage/HdfsAuth.java b/azkaban-common/src/main/java/azkaban/storage/HdfsAuth.java
index 00ea850..6a56b50 100644
--- a/azkaban-common/src/main/java/azkaban/storage/HdfsAuth.java
+++ b/azkaban-common/src/main/java/azkaban/storage/HdfsAuth.java
@@ -17,6 +17,10 @@
 
 package azkaban.storage;
 
+import static azkaban.Constants.ConfigurationKeys.AZKABAN_KERBEROS_PRINCIPAL;
+import static azkaban.Constants.ConfigurationKeys.AZKABAN_KEYTAB_PATH;
+import static java.util.Objects.requireNonNull;
+
 import azkaban.spi.AzkabanException;
 import azkaban.utils.Props;
 import com.google.inject.Inject;
@@ -25,14 +29,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Logger;
 
-import static azkaban.Constants.ConfigurationKeys.*;
-import static java.util.Objects.*;
-
 
 /**
- * This class helps in HDFS authorization and is a wrapper over Hadoop's {@link UserGroupInformation} class.
+ * This class helps in HDFS authorization and is a wrapper over Hadoop's {@link
+ * UserGroupInformation} class.
  */
 public class HdfsAuth {
+
   private static final Logger log = Logger.getLogger(HdfsAuth.class);
 
   private final boolean isSecurityEnabled;
@@ -42,13 +45,13 @@ public class HdfsAuth {
   private String keytabPrincipal = null;
 
   @Inject
-  public HdfsAuth(Props props, Configuration conf) {
+  public HdfsAuth(final Props props, final Configuration conf) {
     UserGroupInformation.setConfiguration(conf);
-    isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
-    if (isSecurityEnabled) {
+    this.isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
+    if (this.isSecurityEnabled) {
       log.info("The Hadoop cluster has enabled security");
-      keytabPath = requireNonNull(props.getString(AZKABAN_KEYTAB_PATH));
-      keytabPrincipal = requireNonNull(props.getString(AZKABAN_KERBEROS_PRINCIPAL));
+      this.keytabPath = requireNonNull(props.getString(AZKABAN_KEYTAB_PATH));
+      this.keytabPrincipal = requireNonNull(props.getString(AZKABAN_KERBEROS_PRINCIPAL));
     }
   }
 
@@ -58,27 +61,29 @@ public class HdfsAuth {
    * If the user is already logged in then it renews the TGT.
    */
   public void authorize() {
-    if (isSecurityEnabled) {
+    if (this.isSecurityEnabled) {
       try {
-        login(keytabPrincipal, keytabPath);
-      } catch (IOException e) {
+        login(this.keytabPrincipal, this.keytabPath);
+      } catch (final IOException e) {
         log.error(e);
         throw new AzkabanException(String.format(
-            "Error: Unable to authorize to Hadoop. Principal: %s Keytab: %s", keytabPrincipal, keytabPath));
+            "Error: Unable to authorize to Hadoop. Principal: %s Keytab: %s", this.keytabPrincipal,
+            this.keytabPath));
       }
     }
   }
 
-  private void login(String keytabPrincipal, String keytabPath) throws IOException {
-    if (loggedInUser == null) {
-      log.info(String.format("Logging in using Principal: %s Keytab: %s", keytabPrincipal, keytabPath));
+  private void login(final String keytabPrincipal, final String keytabPath) throws IOException {
+    if (this.loggedInUser == null) {
+      log.info(
+          String.format("Logging in using Principal: %s Keytab: %s", keytabPrincipal, keytabPath));
 
       UserGroupInformation.loginUserFromKeytab(keytabPrincipal, keytabPath);
-      loggedInUser = UserGroupInformation.getLoginUser();
-      log.info(String.format("User %s logged in.", loggedInUser));
+      this.loggedInUser = UserGroupInformation.getLoginUser();
+      log.info(String.format("User %s logged in.", this.loggedInUser));
     } else {
-      log.info(String.format("User %s already logged in. Refreshing TGT", loggedInUser));
-      loggedInUser.checkTGTAndReloginFromKeytab();
+      log.info(String.format("User %s already logged in. Refreshing TGT", this.loggedInUser));
+      this.loggedInUser.checkTGTAndReloginFromKeytab();
     }
   }
 }
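
A sketch of the lazy keytab-login pattern: authorize() is expected to be called before each HDFS access (as HdfsStorage does below) and is a no-op when the cluster is insecure. The keytab path and principal values are placeholders.

import static azkaban.Constants.ConfigurationKeys.AZKABAN_KERBEROS_PRINCIPAL;
import static azkaban.Constants.ConfigurationKeys.AZKABAN_KEYTAB_PATH;

import azkaban.storage.HdfsAuth;
import azkaban.utils.Props;
import org.apache.hadoop.conf.Configuration;

public class HdfsAuthExample {

  public static void main(final String[] args) {
    final Props props = new Props();
    props.put(AZKABAN_KEYTAB_PATH, "/etc/security/keytabs/azkaban.keytab");
    props.put(AZKABAN_KERBEROS_PRINCIPAL, "azkaban/host@EXAMPLE.COM");

    final HdfsAuth auth = new HdfsAuth(props, new Configuration());

    // No-op on insecure clusters; performs the keytab login (or TGT renewal)
    // on secure ones. Call before each HDFS operation.
    auth.authorize();
  }
}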
diff --git a/azkaban-common/src/main/java/azkaban/storage/HdfsStorage.java b/azkaban-common/src/main/java/azkaban/storage/HdfsStorage.java
index b022314..d2d374f 100644
--- a/azkaban-common/src/main/java/azkaban/storage/HdfsStorage.java
+++ b/azkaban-common/src/main/java/azkaban/storage/HdfsStorage.java
@@ -45,31 +45,33 @@ public class HdfsStorage implements Storage {
   private final FileSystem hdfs;
 
   @Inject
-  public HdfsStorage(HdfsAuth hdfsAuth, FileSystem hdfs, AzkabanCommonModuleConfig config) {
+  public HdfsStorage(final HdfsAuth hdfsAuth, final FileSystem hdfs,
+      final AzkabanCommonModuleConfig config) {
     this.hdfsAuth = requireNonNull(hdfsAuth);
     this.hdfs = requireNonNull(hdfs);
 
     this.rootUri = config.getHdfsRootUri();
-    requireNonNull(rootUri.getAuthority(), "URI must have host:port mentioned.");
-    checkArgument(HDFS_SCHEME.equals(rootUri.getScheme()));
+    requireNonNull(this.rootUri.getAuthority(), "URI must have host:port mentioned.");
+    checkArgument(HDFS_SCHEME.equals(this.rootUri.getScheme()));
   }
 
   @Override
-  public InputStream get(String key) throws IOException {
-    hdfsAuth.authorize();
-    return hdfs.open(new Path(rootUri.toString(), key));
+  public InputStream get(final String key) throws IOException {
+    this.hdfsAuth.authorize();
+    return this.hdfs.open(new Path(this.rootUri.toString(), key));
   }
 
   @Override
-  public String put(StorageMetadata metadata, File localFile) {
-    hdfsAuth.authorize();
-    final Path projectsPath = new Path(rootUri.getPath(), String.valueOf(metadata.getProjectId()));
+  public String put(final StorageMetadata metadata, final File localFile) {
+    this.hdfsAuth.authorize();
+    final Path projectsPath = new Path(this.rootUri.getPath(),
+        String.valueOf(metadata.getProjectId()));
     try {
-      if (hdfs.mkdirs(projectsPath)) {
+      if (this.hdfs.mkdirs(projectsPath)) {
         log.info("Created project dir: " + projectsPath);
       }
       final Path targetPath = createTargetPath(metadata, projectsPath);
-      if (hdfs.exists(targetPath)) {
+      if (this.hdfs.exists(targetPath)) {
         log.info(
             String.format("Duplicate Found: meta: %s path: %s", metadata, targetPath));
         return getRelativePath(targetPath);
@@ -77,19 +79,19 @@ public class HdfsStorage implements Storage {
 
       // Copy file to HDFS
       log.info(String.format("Creating project artifact: meta: %s path: %s", metadata, targetPath));
-      hdfs.copyFromLocalFile(new Path(localFile.getAbsolutePath()), targetPath);
+      this.hdfs.copyFromLocalFile(new Path(localFile.getAbsolutePath()), targetPath);
       return getRelativePath(targetPath);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       log.error("error in put(): Metadata: " + metadata);
       throw new StorageException(e);
     }
   }
 
-  private String getRelativePath(Path targetPath) {
-    return URI.create(rootUri.getPath()).relativize(targetPath.toUri()).getPath();
+  private String getRelativePath(final Path targetPath) {
+    return URI.create(this.rootUri.getPath()).relativize(targetPath.toUri()).getPath();
   }
 
-  private Path createTargetPath(StorageMetadata metadata, Path projectsPath) {
+  private Path createTargetPath(final StorageMetadata metadata, final Path projectsPath) {
     return new Path(projectsPath, String.format("%s-%s.zip",
         String.valueOf(metadata.getProjectId()),
         new String(Hex.encodeHex(metadata.getHash()))
@@ -97,7 +99,7 @@ public class HdfsStorage implements Storage {
   }
 
   @Override
-  public boolean delete(String key) {
+  public boolean delete(final String key) {
     throw new UnsupportedOperationException("Method not implemented");
   }
 }
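
A small sketch of the key layout implied by put(): artifacts land under <root>/<projectId>/<projectId>-<md5 hex>.zip, and the returned key is the path relative to the root URI, computed with the same relativize call as getRelativePath(). The URI, project id and (shortened) hash are placeholders.

import java.net.URI;
import org.apache.hadoop.fs.Path;

public class HdfsKeyLayoutExample {

  public static void main(final String[] args) {
    final URI rootUri = URI.create("hdfs://namenode:9000/azkaban/projects");

    // Mirrors put(): projects dir named after the project id, zip named
    // <projectId>-<md5 hex>.zip (the real hash is the full MD5 hex string).
    final Path projectsPath = new Path(rootUri.getPath(), "107");
    final Path targetPath = new Path(projectsPath, "107-1a2b3c4d.zip");

    // Same relativize call as HdfsStorage.getRelativePath(...).
    final String key =
        URI.create(rootUri.getPath()).relativize(targetPath.toUri()).getPath();
    System.out.println(key);  // 107/107-1a2b3c4d.zip
  }
}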
diff --git a/azkaban-common/src/main/java/azkaban/storage/LocalStorage.java b/azkaban-common/src/main/java/azkaban/storage/LocalStorage.java
index 076b07a..dafc738 100644
--- a/azkaban-common/src/main/java/azkaban/storage/LocalStorage.java
+++ b/azkaban-common/src/main/java/azkaban/storage/LocalStorage.java
@@ -24,7 +24,6 @@ import azkaban.spi.Storage;
 import azkaban.spi.StorageException;
 import azkaban.spi.StorageMetadata;
 import azkaban.utils.FileIOUtils;
-import com.google.common.io.Files;
 import com.google.inject.Inject;
 import java.io.File;
 import java.io.FileInputStream;
@@ -41,22 +40,39 @@ public class LocalStorage implements Storage {
   final File rootDirectory;
 
   @Inject
-  public LocalStorage(AzkabanCommonModuleConfig config) {
+  public LocalStorage(final AzkabanCommonModuleConfig config) {
     this.rootDirectory = validateRootDirectory(
         createIfDoesNotExist(config.getLocalStorageBaseDirPath()));
   }
 
+  private static File createIfDoesNotExist(final String baseDirectoryPath) {
+    final File baseDirectory = new File(baseDirectoryPath);
+    if (!baseDirectory.exists()) {
+      baseDirectory.mkdir();
+      log.info("Creating dir: " + baseDirectory.getAbsolutePath());
+    }
+    return baseDirectory;
+  }
+
+  private static File validateRootDirectory(final File baseDirectory) {
+    checkArgument(baseDirectory.isDirectory());
+    if (!FileIOUtils.isDirWritable(baseDirectory)) {
+      throw new IllegalArgumentException("Directory not writable: " + baseDirectory);
+    }
+    return baseDirectory;
+  }
+
   /**
    * @param key Relative path of the file from the baseDirectory
    */
   @Override
-  public InputStream get(String key) throws IOException {
-    return new FileInputStream(new File(rootDirectory, key));
+  public InputStream get(final String key) throws IOException {
+    return new FileInputStream(new File(this.rootDirectory, key));
   }
 
   @Override
-  public String put(StorageMetadata metadata, File localFile) {
-    final File projectDir = new File(rootDirectory, String.valueOf(metadata.getProjectId()));
+  public String put(final StorageMetadata metadata, final File localFile) {
+    final File projectDir = new File(this.rootDirectory, String.valueOf(metadata.getProjectId()));
     if (projectDir.mkdir()) {
       log.info("Created project dir: " + projectDir.getAbsolutePath());
     }
@@ -74,36 +90,19 @@ public class LocalStorage implements Storage {
     // Copy file to storage dir
     try {
       FileUtils.copyFile(localFile, targetFile);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       log.error("LocalStorage error in put(): meta: " + metadata);
       throw new StorageException(e);
     }
     return getRelativePath(targetFile);
   }
 
-  private String getRelativePath(File targetFile) {
-    return rootDirectory.toURI().relativize(targetFile.toURI()).getPath();
+  private String getRelativePath(final File targetFile) {
+    return this.rootDirectory.toURI().relativize(targetFile.toURI()).getPath();
   }
 
   @Override
-  public boolean delete(String key) {
+  public boolean delete(final String key) {
     throw new UnsupportedOperationException("delete has not been implemented.");
   }
-
-  private static File createIfDoesNotExist(String baseDirectoryPath) {
-    final File baseDirectory = new File(baseDirectoryPath);
-    if (!baseDirectory.exists()) {
-      baseDirectory.mkdir();
-      log.info("Creating dir: " + baseDirectory.getAbsolutePath());
-    }
-    return baseDirectory;
-  }
-
-  private static File validateRootDirectory(File baseDirectory) {
-    checkArgument(baseDirectory.isDirectory());
-    if (!FileIOUtils.isDirWritable(baseDirectory)) {
-      throw new IllegalArgumentException("Directory not writable: " + baseDirectory);
-    }
-    return baseDirectory;
-  }
 }
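
A sketch of reading an artifact back through the Storage interface using the relative key that put() returned; the key value and copy destination are illustrative.

import azkaban.spi.Storage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class ArtifactReader {

  /**
   * Copies a stored artifact to a local path. For LocalStorage the key resolves
   * to <baseDir>/<key>; for HdfsStorage it resolves under the HDFS root URI.
   */
  public static void copyArtifact(final Storage storage, final String key,
      final Path destination) throws IOException {
    try (InputStream in = storage.get(key)) {
      Files.copy(in, destination, StandardCopyOption.REPLACE_EXISTING);
    }
  }
}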
diff --git a/azkaban-common/src/main/java/azkaban/storage/StorageImplementationType.java b/azkaban-common/src/main/java/azkaban/storage/StorageImplementationType.java
index 4c5a074..3f7ae25 100644
--- a/azkaban-common/src/main/java/azkaban/storage/StorageImplementationType.java
+++ b/azkaban-common/src/main/java/azkaban/storage/StorageImplementationType.java
@@ -27,19 +27,19 @@ public enum StorageImplementationType {
 
   private final Class<? extends Storage> implementationClass;
 
-  StorageImplementationType(Class<? extends Storage> implementationClass) {
+  StorageImplementationType(final Class<? extends Storage> implementationClass) {
     this.implementationClass = implementationClass;
   }
 
-  public Class<? extends Storage> getImplementationClass() {
-    return implementationClass;
-  }
-
-  public static StorageImplementationType from(String name) {
+  public static StorageImplementationType from(final String name) {
     try {
       return valueOf(name);
-    } catch (NullPointerException e) {
+    } catch (final NullPointerException e) {
       return null;
     }
   }
+
+  public Class<? extends Storage> getImplementationClass() {
+    return this.implementationClass;
+  }
 }
diff --git a/azkaban-common/src/main/java/azkaban/storage/StorageManager.java b/azkaban-common/src/main/java/azkaban/storage/StorageManager.java
index ff83f20..f8a6281 100644
--- a/azkaban-common/src/main/java/azkaban/storage/StorageManager.java
+++ b/azkaban-common/src/main/java/azkaban/storage/StorageManager.java
@@ -17,6 +17,10 @@
 
 package azkaban.storage;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkState;
+import static java.util.Objects.requireNonNull;
+
 import azkaban.project.Project;
 import azkaban.project.ProjectFileHandler;
 import azkaban.project.ProjectLoader;
@@ -35,15 +39,13 @@ import java.util.Arrays;
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
-import static com.google.common.base.Preconditions.*;
-import static java.util.Objects.*;
-
 
 /**
- * StorageManager manages and coordinates all interactions with the Storage layer. This also includes bookkeeping
- * like updating DB with the new versionm, etc
+ * StorageManager manages and coordinates all interactions with the Storage layer. This also
+ * includes bookkeeping like updating DB with the new version, etc.
  */
 public class StorageManager {
+
   private static final Logger log = Logger.getLogger(StorageManager.class);
 
   private final Storage storage;
@@ -51,7 +53,8 @@ public class StorageManager {
   private final File tempDir;
 
   @Inject
-  public StorageManager(Props props, Storage storage, ProjectLoader projectLoader) {
+  public StorageManager(final Props props, final Storage storage,
+      final ProjectLoader projectLoader) {
     this.tempDir = new File(props.getString("project.temp.dir", "temp"));
     this.storage = requireNonNull(storage);
     this.projectLoader = requireNonNull(projectLoader);
@@ -60,10 +63,10 @@ public class StorageManager {
   }
 
   private void prepareTempDir() {
-    if (!tempDir.exists()) {
-      tempDir.mkdirs();
+    if (!this.tempDir.exists()) {
+      this.tempDir.mkdirs();
     }
-    checkArgument(tempDir.isDirectory());
+    checkArgument(this.tempDir.isDirectory());
   }
 
   /**
@@ -71,18 +74,18 @@ public class StorageManager {
    *
    * TODO clean up interface
    *
-   * @param project           project
-   * @param version           The new version to be uploaded
-   * @param localFile         local file
-   * @param uploader          the user who uploaded
+   * @param project project
+   * @param version The new version to be uploaded
+   * @param localFile local file
+   * @param uploader the user who uploaded
    */
   public void uploadProject(
-      Project project,
-      int version,
-      File localFile,
-      User uploader) {
+      final Project project,
+      final int version,
+      final File localFile,
+      final User uploader) {
     byte[] md5 = null;
-    if (!(storage instanceof DatabaseStorage)) {
+    if (!(this.storage instanceof DatabaseStorage)) {
       md5 = computeHash(localFile);
     }
     final StorageMetadata metadata = new StorageMetadata(
@@ -94,12 +97,12 @@ public class StorageManager {
         metadata, localFile.getName(), localFile.length()));
 
     /* upload to storage */
-    final String resourceId = storage.put(metadata, localFile);
+    final String resourceId = this.storage.put(metadata, localFile);
 
     /* Add metadata to db */
     // TODO spyne: remove hack. Database storage should go through the same flow
-    if (!(storage instanceof DatabaseStorage)) {
-      projectLoader.addProjectVersion(
+    if (!(this.storage instanceof DatabaseStorage)) {
+      this.projectLoader.addProjectVersion(
           project.getId(),
           version,
           localFile,
@@ -112,11 +115,11 @@ public class StorageManager {
     }
   }
 
-  private byte[] computeHash(File localFile) {
+  private byte[] computeHash(final File localFile) {
     final byte[] md5;
     try {
       md5 = Md5Hasher.md5Hash(localFile);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new StorageException(e);
     }
     return md5;
@@ -130,19 +133,21 @@ public class StorageManager {
    * @return Handler object containing hooks to fetched project file
    */
   public ProjectFileHandler getProjectFile(final int projectId, final int version) {
-    log.info(String.format("Fetching project file. project ID: %d version: %d", projectId, version));
+    log.info(
+        String.format("Fetching project file. project ID: %d version: %d", projectId, version));
     // TODO spyne: remove huge hack ! There should not be any special handling for Database Storage.
-    if (storage instanceof DatabaseStorage) {
-      return ((DatabaseStorage) storage).get(projectId, version);
+    if (this.storage instanceof DatabaseStorage) {
+      return ((DatabaseStorage) this.storage).get(projectId, version);
     }
 
     /* Fetch meta data from db */
-    final ProjectFileHandler pfh = projectLoader.fetchProjectMetaData(projectId, version);
+    final ProjectFileHandler pfh = this.projectLoader.fetchProjectMetaData(projectId, version);
 
     /* Fetch project file from storage and copy to local file */
-    final String resourceId = requireNonNull(pfh.getResourceId(), String.format("URI is null. project ID: %d version: %d",
-        pfh.getProjectId(), pfh.getVersion()));
-    try (InputStream is = storage.get(resourceId)){
+    final String resourceId = requireNonNull(pfh.getResourceId(),
+        String.format("URI is null. project ID: %d version: %d",
+            pfh.getProjectId(), pfh.getVersion()));
+    try (InputStream is = this.storage.get(resourceId)) {
       final File file = createTempOutputFile(pfh);
 
       /* Copy from storage to output stream */
@@ -157,12 +162,12 @@ public class StorageManager {
       pfh.setLocalFile(file);
 
       return pfh;
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new StorageException(e);
     }
   }
 
-  private void validateChecksum(File file, ProjectFileHandler pfh) throws IOException {
+  private void validateChecksum(final File file, final ProjectFileHandler pfh) throws IOException {
     final byte[] hash = Md5Hasher.md5Hash(file);
     checkState(Arrays.equals(pfh.getMd5Hash(), hash),
         String.format("MD5 HASH Failed. project ID: %d version: %d Expected: %s Actual: %s",
@@ -170,9 +175,10 @@ public class StorageManager {
     );
   }
 
-  private File createTempOutputFile(ProjectFileHandler projectFileHandler) throws IOException {
+  private File createTempOutputFile(final ProjectFileHandler projectFileHandler)
+      throws IOException {
     return File.createTempFile(
         projectFileHandler.getFileName(),
-        String.valueOf(projectFileHandler.getVersion()), tempDir);
+        String.valueOf(projectFileHandler.getVersion()), this.tempDir);
   }
 }
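
A sketch of the two StorageManager entry points above; the manager would normally be injected, and the project, version and file arguments are placeholders.

import azkaban.project.Project;
import azkaban.project.ProjectFileHandler;
import azkaban.storage.StorageManager;
import azkaban.user.User;
import java.io.File;

public class StorageManagerExample {

  public static void upload(final StorageManager storageManager, final Project project,
      final int newVersion, final File zipFile, final User uploader) {
    // Pushes the archive to the configured Storage implementation and, except for
    // DatabaseStorage, records the new version in the project DB.
    storageManager.uploadProject(project, newVersion, zipFile, uploader);
  }

  public static ProjectFileHandler download(final StorageManager storageManager,
      final int projectId, final int version) {
    // Fetches the metadata, streams the archive into a temp file and verifies its
    // MD5 before returning the handler that points at the local copy.
    return storageManager.getProjectFile(projectId, version);
  }
}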
diff --git a/azkaban-common/src/main/java/azkaban/trigger/ActionTypeLoader.java b/azkaban-common/src/main/java/azkaban/trigger/ActionTypeLoader.java
index 45b4fbb..245b53e 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/ActionTypeLoader.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/ActionTypeLoader.java
@@ -16,48 +16,44 @@
 
 package azkaban.trigger;
 
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-import azkaban.utils.Utils;
-
 public class ActionTypeLoader {
 
-  private static Logger logger = Logger.getLogger(ActionTypeLoader.class);
-
   public static final String DEFAULT_TRIGGER_ACTION_PLUGIN_DIR =
       "plugins/triggeractions";
-
+  private static final Logger logger = Logger.getLogger(ActionTypeLoader.class);
   protected static Map<String, Class<? extends TriggerAction>> actionToClass =
-      new HashMap<String, Class<? extends TriggerAction>>();
+      new HashMap<>();
+
+  public static void registerBuiltinActions(
+      final Map<String, Class<? extends TriggerAction>> builtinActions) {
+    actionToClass.putAll(builtinActions);
+    for (final String type : builtinActions.keySet()) {
+      logger.info("Loaded " + type + " action.");
+    }
+  }
 
-  public void init(Props props) throws TriggerException {
+  public void init(final Props props) throws TriggerException {
   }
 
-  public synchronized void registerActionType(String type,
-      Class<? extends TriggerAction> actionClass) {
+  public synchronized void registerActionType(final String type,
+      final Class<? extends TriggerAction> actionClass) {
     logger.info("Registering action " + type);
     if (!actionToClass.containsKey(type)) {
       actionToClass.put(type, actionClass);
     }
   }
 
-  public static void registerBuiltinActions(
-      Map<String, Class<? extends TriggerAction>> builtinActions) {
-    actionToClass.putAll(builtinActions);
-    for (String type : builtinActions.keySet()) {
-      logger.info("Loaded " + type + " action.");
-    }
-  }
-
-  public TriggerAction createActionFromJson(String type, Object obj)
+  public TriggerAction createActionFromJson(final String type, final Object obj)
       throws Exception {
     TriggerAction action = null;
-    Class<? extends TriggerAction> actionClass = actionToClass.get(type);
+    final Class<? extends TriggerAction> actionClass = actionToClass.get(type);
     if (actionClass == null) {
       throw new Exception("Action Type " + type + " not supported!");
     }
@@ -68,9 +64,9 @@ public class ActionTypeLoader {
     return action;
   }
 
-  public TriggerAction createAction(String type, Object... args) {
+  public TriggerAction createAction(final String type, final Object... args) {
     TriggerAction action = null;
-    Class<? extends TriggerAction> actionClass = actionToClass.get(type);
+    final Class<? extends TriggerAction> actionClass = actionToClass.get(type);
     action = (TriggerAction) Utils.callConstructor(actionClass, args);
 
     return action;
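
A sketch of registering a custom action type and creating it by name. LogAction is hypothetical, and its method set mirrors what the TriggerAction implementations in this diff override, which is assumed here to be the full interface.

import azkaban.trigger.ActionTypeLoader;
import azkaban.trigger.TriggerAction;
import java.util.HashMap;
import java.util.Map;

public class ActionTypeLoaderExample {

  /** Hypothetical action used only for this sketch. */
  public static class LogAction implements TriggerAction {

    public static final String type = "LogAction";
    private final String actionId;

    public LogAction(final String actionId) {
      this.actionId = actionId;
    }

    @Override
    public String getType() {
      return type;
    }

    @Override
    public TriggerAction fromJson(final Object obj) throws Exception {
      final Map<String, Object> json = (HashMap<String, Object>) obj;
      return new LogAction((String) json.get("actionId"));
    }

    @Override
    public Object toJson() {
      final Map<String, Object> json = new HashMap<>();
      json.put("type", type);
      json.put("actionId", this.actionId);
      return json;
    }

    @Override
    public void doAction() {
      System.out.println("Triggered " + this.actionId);
    }

    @Override
    public String getDescription() {
      return "log action " + this.actionId;
    }

    @Override
    public String getId() {
      return this.actionId;
    }

    @Override
    public void setContext(final Map<String, Object> context) {
    }
  }

  public static void main(final String[] args) throws Exception {
    final ActionTypeLoader loader = new ActionTypeLoader();
    loader.registerActionType(LogAction.type, LogAction.class);

    // createAction passes the varargs to a matching constructor reflectively
    // (via Utils.callConstructor), so LogAction needs the (String) constructor.
    final TriggerAction action = loader.createAction(LogAction.type, "alert-1");
    action.doAction();
  }
}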
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
index bae43c9..05b7316 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/BasicTimeChecker.java
@@ -16,39 +16,33 @@
 
 package azkaban.trigger.builtin;
 
+import azkaban.trigger.ConditionChecker;
+import azkaban.utils.Utils;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.Map;
-import java.util.Date;
-
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadablePeriod;
-import org.apache.log4j.Logger;
-
 import org.quartz.CronExpression;
 
-import azkaban.trigger.ConditionChecker;
-import azkaban.utils.Utils;
-
 public class BasicTimeChecker implements ConditionChecker {
 
 
   public static final String type = "BasicTimeChecker";
-
-  private long firstCheckTime;
+  private final String id;
+  private final long firstCheckTime;
+  private final DateTimeZone timezone;
+  private final ReadablePeriod period;
+  private final String cronExpression;
+  private final CronExpression cronExecutionTime;
   private long nextCheckTime;
-  private DateTimeZone timezone;
   private boolean isRecurring = true;
   private boolean skipPastChecks = true;
-  private ReadablePeriod period;
 
-  private String cronExpression;
-  private CronExpression cronExecutionTime;
-  private final String id;
-
-  public BasicTimeChecker(String id, long firstCheckTime,
-      DateTimeZone timezone, boolean isRecurring, boolean skipPastChecks,
-      ReadablePeriod period, String cronExpression) {
+  public BasicTimeChecker(final String id, final long firstCheckTime,
+      final DateTimeZone timezone, final boolean isRecurring, final boolean skipPastChecks,
+      final ReadablePeriod period, final String cronExpression) {
     this.id = id;
     this.firstCheckTime = firstCheckTime;
     this.timezone = timezone;
@@ -57,56 +51,88 @@ public class BasicTimeChecker implements ConditionChecker {
     this.period = period;
     this.nextCheckTime = firstCheckTime;
     this.cronExpression = cronExpression;
-    cronExecutionTime = Utils.parseCronExpression(cronExpression, timezone);
+    this.cronExecutionTime = Utils.parseCronExpression(cronExpression, timezone);
     this.nextCheckTime = calculateNextCheckTime();
   }
 
+  public BasicTimeChecker(final String id, final long firstCheckTime,
+      final DateTimeZone timezone, final long nextCheckTime, final boolean isRecurring,
+      final boolean skipPastChecks, final ReadablePeriod period, final String cronExpression) {
+    this.id = id;
+    this.firstCheckTime = firstCheckTime;
+    this.timezone = timezone;
+    this.nextCheckTime = nextCheckTime;
+    this.isRecurring = isRecurring;
+    this.skipPastChecks = skipPastChecks;
+    this.period = period;
+    this.cronExpression = cronExpression;
+    this.cronExecutionTime = Utils.parseCronExpression(cronExpression, timezone);
+  }
+
+  public static BasicTimeChecker createFromJson(final Object obj) throws Exception {
+    return createFromJson((HashMap<String, Object>) obj);
+  }
+
+  public static BasicTimeChecker createFromJson(final HashMap<String, Object> obj)
+      throws Exception {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    if (!jsonObj.get("type").equals(type)) {
+      throw new Exception("Cannot create checker of " + type + " from "
+          + jsonObj.get("type"));
+    }
+    final Long firstCheckTime = Long.valueOf((String) jsonObj.get("firstCheckTime"));
+    final String timezoneId = (String) jsonObj.get("timezone");
+    final long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+    final DateTimeZone timezone = DateTimeZone.forID(timezoneId);
+    final boolean isRecurring = Boolean.valueOf((String) jsonObj.get("isRecurring"));
+    final boolean skipPastChecks =
+        Boolean.valueOf((String) jsonObj.get("skipPastChecks"));
+    final ReadablePeriod period =
+        Utils.parsePeriodString((String) jsonObj.get("period"));
+    final String id = (String) jsonObj.get("id");
+    final String cronExpression = (String) jsonObj.get("cronExpression");
+
+    final BasicTimeChecker checker =
+        new BasicTimeChecker(id, firstCheckTime, timezone, nextCheckTime,
+            isRecurring, skipPastChecks, period, cronExpression);
+    if (skipPastChecks) {
+      checker.updateNextCheckTime();
+    }
+    return checker;
+  }
+
   public long getFirstCheckTime() {
-    return firstCheckTime;
+    return this.firstCheckTime;
   }
 
   public DateTimeZone getTimeZone() {
-    return timezone;
+    return this.timezone;
   }
 
   public boolean isRecurring() {
-    return isRecurring;
+    return this.isRecurring;
   }
 
   public boolean isSkipPastChecks() {
-    return skipPastChecks;
+    return this.skipPastChecks;
   }
 
   public ReadablePeriod getPeriod() {
-    return period;
+    return this.period;
   }
 
   @Override
   public long getNextCheckTime() {
-    return nextCheckTime;
+    return this.nextCheckTime;
   }
 
   public String getCronExpression() {
-    return cronExpression;
-  }
-
-  public BasicTimeChecker(String id, long firstCheckTime,
-      DateTimeZone timezone, long nextCheckTime, boolean isRecurring,
-      boolean skipPastChecks, ReadablePeriod period, String cronExpression) {
-    this.id = id;
-    this.firstCheckTime = firstCheckTime;
-    this.timezone = timezone;
-    this.nextCheckTime = nextCheckTime;
-    this.isRecurring = isRecurring;
-    this.skipPastChecks = skipPastChecks;
-    this.period = period;
-    this.cronExpression = cronExpression;
-    cronExecutionTime = Utils.parseCronExpression(cronExpression, timezone);
+    return this.cronExpression;
   }
 
   @Override
   public Boolean eval() {
-    return nextCheckTime < System.currentTimeMillis();
+    return this.nextCheckTime < System.currentTimeMillis();
   }
 
   @Override
@@ -116,7 +142,7 @@ public class BasicTimeChecker implements ConditionChecker {
 
   @Override
   public String getId() {
-    return id;
+    return this.id;
   }
 
   @Override
@@ -124,66 +150,33 @@ public class BasicTimeChecker implements ConditionChecker {
     return type;
   }
 
-  @SuppressWarnings("unchecked")
-  public static BasicTimeChecker createFromJson(Object obj) throws Exception {
-    return createFromJson((HashMap<String, Object>) obj);
-  }
-
-  public static BasicTimeChecker createFromJson(HashMap<String, Object> obj)
-      throws Exception {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    if (!jsonObj.get("type").equals(type)) {
-      throw new Exception("Cannot create checker of " + type + " from "
-          + jsonObj.get("type"));
-    }
-    Long firstCheckTime = Long.valueOf((String) jsonObj.get("firstCheckTime"));
-    String timezoneId = (String) jsonObj.get("timezone");
-    long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
-    DateTimeZone timezone = DateTimeZone.forID(timezoneId);
-    boolean isRecurring = Boolean.valueOf((String) jsonObj.get("isRecurring"));
-    boolean skipPastChecks =
-        Boolean.valueOf((String) jsonObj.get("skipPastChecks"));
-    ReadablePeriod period =
-        Utils.parsePeriodString((String) jsonObj.get("period"));
-    String id = (String) jsonObj.get("id");
-    String cronExpression = (String) jsonObj.get("cronExpression");
-
-    BasicTimeChecker checker =
-        new BasicTimeChecker(id, firstCheckTime, timezone, nextCheckTime,
-            isRecurring, skipPastChecks, period, cronExpression);
-    if (skipPastChecks) {
-      checker.updateNextCheckTime();
-    }
-    return checker;
-  }
-
   @Override
-  public BasicTimeChecker fromJson(Object obj) throws Exception {
+  public BasicTimeChecker fromJson(final Object obj) throws Exception {
     return createFromJson(obj);
   }
 
   private void updateNextCheckTime() {
-    nextCheckTime = calculateNextCheckTime();
+    this.nextCheckTime = calculateNextCheckTime();
   }
 
   private long calculateNextCheckTime() {
-    DateTime date = new DateTime(nextCheckTime).withZone(timezone);
+    DateTime date = new DateTime(this.nextCheckTime).withZone(this.timezone);
     int count = 0;
     while (!date.isAfterNow()) {
       if (count > 100000) {
         throw new IllegalStateException(
             "100000 increments of period did not get to present time.");
       }
-      if (period == null && cronExpression == null) {
+      if (this.period == null && this.cronExpression == null) {
         break;
-      } else if (cronExecutionTime != null) {
-        Date nextDate = cronExecutionTime.getNextValidTimeAfter(date.toDate());
+      } else if (this.cronExecutionTime != null) {
+        final Date nextDate = this.cronExecutionTime.getNextValidTimeAfter(date.toDate());
         date = new DateTime(nextDate);
       } else {
-        date = date.plus(period);
+        date = date.plus(this.period);
       }
       count += 1;
-      if (!skipPastChecks) {
+      if (!this.skipPastChecks) {
         continue;
       }
     }
@@ -197,16 +190,16 @@ public class BasicTimeChecker implements ConditionChecker {
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
+    final Map<String, Object> jsonObj = new HashMap<>();
     jsonObj.put("type", type);
-    jsonObj.put("firstCheckTime", String.valueOf(firstCheckTime));
-    jsonObj.put("timezone", timezone.getID());
-    jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
-    jsonObj.put("isRecurring", String.valueOf(isRecurring));
-    jsonObj.put("skipPastChecks", String.valueOf(skipPastChecks));
-    jsonObj.put("period", Utils.createPeriodString(period));
-    jsonObj.put("id", id);
-    jsonObj.put("cronExpression", cronExpression);
+    jsonObj.put("firstCheckTime", String.valueOf(this.firstCheckTime));
+    jsonObj.put("timezone", this.timezone.getID());
+    jsonObj.put("nextCheckTime", String.valueOf(this.nextCheckTime));
+    jsonObj.put("isRecurring", String.valueOf(this.isRecurring));
+    jsonObj.put("skipPastChecks", String.valueOf(this.skipPastChecks));
+    jsonObj.put("period", Utils.createPeriodString(this.period));
+    jsonObj.put("id", this.id);
+    jsonObj.put("cronExpression", this.cronExpression);
 
     return jsonObj;
   }
@@ -217,7 +210,7 @@ public class BasicTimeChecker implements ConditionChecker {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
 }
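
A sketch of constructing the cron-driven checker with the seven-argument constructor shown above; the id and the Quartz cron expression are placeholders.

import azkaban.trigger.builtin.BasicTimeChecker;
import org.joda.time.DateTimeZone;

public class BasicTimeCheckerExample {

  public static void main(final String[] args) {
    // Fire daily at 02:30 UTC; with a cron expression set, the period argument
    // is left null and calculateNextCheckTime() follows the cron schedule.
    final BasicTimeChecker checker = new BasicTimeChecker(
        "nightly-check",              // id
        System.currentTimeMillis(),   // firstCheckTime
        DateTimeZone.UTC,             // timezone
        true,                         // isRecurring
        true,                         // skipPastChecks
        null,                         // period (ReadablePeriod, unused here)
        "0 30 2 * * ?");              // Quartz cron expression

    System.out.println("next check time: " + checker.getNextCheckTime());
    System.out.println("due now? " + checker.eval());  // true once that time has passed
  }
}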
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
index 839ab0c..985b35e 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/CreateTriggerAction.java
@@ -16,80 +16,77 @@
 
 package azkaban.trigger.builtin;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import azkaban.trigger.Trigger;
 import azkaban.trigger.TriggerAction;
 import azkaban.trigger.TriggerManager;
+import java.util.HashMap;
+import java.util.Map;
 
 public class CreateTriggerAction implements TriggerAction {
 
   public static final String type = "CreateTriggerAction";
   private static TriggerManager triggerManager;
-  private Trigger trigger;
-  @SuppressWarnings("unused")
+  private final Trigger trigger;
+  private final String actionId;
   private Map<String, Object> context;
-  private String actionId;
 
-  public CreateTriggerAction(String actionId, Trigger trigger) {
+  public CreateTriggerAction(final String actionId, final Trigger trigger) {
     this.actionId = actionId;
     this.trigger = trigger;
   }
 
-  @Override
-  public String getType() {
-    return type;
-  }
-
-  public static void setTriggerManager(TriggerManager trm) {
+  public static void setTriggerManager(final TriggerManager trm) {
     triggerManager = trm;
   }
 
-  @SuppressWarnings("unchecked")
-  public static CreateTriggerAction createFromJson(Object obj) throws Exception {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+  public static CreateTriggerAction createFromJson(final Object obj) throws Exception {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
     if (!jsonObj.get("type").equals(type)) {
       throw new Exception("Cannot create action of " + type + " from "
           + jsonObj.get("type"));
     }
-    String actionId = (String) jsonObj.get("actionId");
-    Trigger trigger = Trigger.fromJson(jsonObj.get("trigger"));
+    final String actionId = (String) jsonObj.get("actionId");
+    final Trigger trigger = Trigger.fromJson(jsonObj.get("trigger"));
     return new CreateTriggerAction(actionId, trigger);
   }
 
   @Override
-  public CreateTriggerAction fromJson(Object obj) throws Exception {
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public CreateTriggerAction fromJson(final Object obj) throws Exception {
     return createFromJson(obj);
   }
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("actionId", actionId);
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("actionId", this.actionId);
     jsonObj.put("type", type);
-    jsonObj.put("trigger", trigger.toJson());
+    jsonObj.put("trigger", this.trigger.toJson());
 
     return jsonObj;
   }
 
   @Override
   public void doAction() throws Exception {
-    triggerManager.insertTrigger(trigger);
+    triggerManager.insertTrigger(this.trigger);
   }
 
   @Override
   public String getDescription() {
-    return "create another: " + trigger.getDescription();
+    return "create another: " + this.trigger.getDescription();
   }
 
   @Override
   public String getId() {
-    return actionId;
+    return this.actionId;
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
     this.context = context;
   }
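
For reference, the contract that CreateTriggerAction's toJson() and createFromJson() agree on above is a plain map keyed by "type", "actionId" and the nested trigger JSON. A hedged round-trip sketch; the action id is a placeholder and the Trigger instance is assumed to be fully built elsewhere.

    import azkaban.trigger.Trigger;
    import azkaban.trigger.builtin.CreateTriggerAction;

    public class CreateTriggerActionRoundTrip {

      // Illustrative only: 'trigger' must be a complete azkaban.trigger.Trigger
      // whose own toJson()/fromJson() round-trip is assumed to work.
      static CreateTriggerAction roundTrip(Trigger trigger) throws Exception {
        CreateTriggerAction action = new CreateTriggerAction("action-1", trigger);

        // toJson() returns a map holding "type", "actionId" and the trigger JSON.
        Object json = action.toJson();

        // createFromJson() rejects maps whose "type" is not "CreateTriggerAction",
        // then rebuilds the nested trigger via Trigger.fromJson().
        return CreateTriggerAction.createFromJson(json);
      }
    }
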
 
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
index dfbdbe0..fff11e6 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecuteFlowAction.java
@@ -16,13 +16,6 @@
 
 package azkaban.trigger.builtin;
 
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutorManagerAdapter;
@@ -31,11 +24,13 @@ import azkaban.flow.Flow;
 import azkaban.project.Project;
 import azkaban.project.ProjectManager;
 import azkaban.sla.SlaOption;
-import azkaban.trigger.Condition;
-import azkaban.trigger.ConditionChecker;
-import azkaban.trigger.Trigger;
 import azkaban.trigger.TriggerAction;
 import azkaban.trigger.TriggerManager;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.apache.log4j.Logger;
 
 public class ExecuteFlowAction implements TriggerAction {
 
@@ -45,20 +40,19 @@ public class ExecuteFlowAction implements TriggerAction {
 
   private static ExecutorManagerAdapter executorManager;
   private static TriggerManager triggerManager;
-  private String actionId;
+  private static ProjectManager projectManager;
+  private static Logger logger = Logger.getLogger(ExecuteFlowAction.class);
+  private final String actionId;
+  private final String projectName;
   private int projectId;
-  private String projectName;
   private String flowName;
   private String submitUser;
-  private static ProjectManager projectManager;
   private ExecutionOptions executionOptions = new ExecutionOptions();
   private List<SlaOption> slaOptions;
 
-  private static Logger logger = Logger.getLogger(ExecuteFlowAction.class);
-
-  public ExecuteFlowAction(String actionId, int projectId, String projectName,
-      String flowName, String submitUser, ExecutionOptions executionOptions,
-      List<SlaOption> slaOptions) {
+  public ExecuteFlowAction(final String actionId, final int projectId, final String projectName,
+      final String flowName, final String submitUser, final ExecutionOptions executionOptions,
+      final List<SlaOption> slaOptions) {
     this.actionId = actionId;
     this.projectId = projectId;
     this.projectName = projectName;
@@ -68,76 +62,105 @@ public class ExecuteFlowAction implements TriggerAction {
     this.slaOptions = slaOptions;
   }
 
-  public static void setLogger(Logger logger) {
+  public static void setLogger(final Logger logger) {
     ExecuteFlowAction.logger = logger;
   }
 
-  public String getProjectName() {
-    return projectName;
+  public static ExecutorManagerAdapter getExecutorManager() {
+    return executorManager;
   }
 
-  public int getProjectId() {
-    return projectId;
+  public static void setExecutorManager(final ExecutorManagerAdapter executorManager) {
+    ExecuteFlowAction.executorManager = executorManager;
   }
 
-  protected void setProjectId(int projectId) {
-    this.projectId = projectId;
+  public static TriggerManager getTriggerManager() {
+    return triggerManager;
   }
 
-  public String getFlowName() {
-    return flowName;
+  public static void setTriggerManager(final TriggerManager triggerManager) {
+    ExecuteFlowAction.triggerManager = triggerManager;
   }
 
-  protected void setFlowName(String flowName) {
-    this.flowName = flowName;
+  public static ProjectManager getProjectManager() {
+    return projectManager;
   }
 
-  public String getSubmitUser() {
-    return submitUser;
+  public static void setProjectManager(final ProjectManager projectManager) {
+    ExecuteFlowAction.projectManager = projectManager;
   }
 
-  protected void setSubmitUser(String submitUser) {
-    this.submitUser = submitUser;
+  public static TriggerAction createFromJson(final HashMap<String, Object> obj) {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    final String objType = (String) jsonObj.get("type");
+    if (!objType.equals(type)) {
+      throw new RuntimeException("Cannot create action of " + type + " from "
+          + objType);
+    }
+    final String actionId = (String) jsonObj.get("actionId");
+    final int projectId = Integer.valueOf((String) jsonObj.get("projectId"));
+    final String projectName = (String) jsonObj.get("projectName");
+    final String flowName = (String) jsonObj.get("flowName");
+    final String submitUser = (String) jsonObj.get("submitUser");
+    ExecutionOptions executionOptions = null;
+    if (jsonObj.containsKey("executionOptions")) {
+      executionOptions =
+          ExecutionOptions.createFromObject(jsonObj.get("executionOptions"));
+    }
+    List<SlaOption> slaOptions = null;
+    if (jsonObj.containsKey("slaOptions")) {
+      slaOptions = new ArrayList<>();
+      final List<Object> slaOptionsObj = (List<Object>) jsonObj.get("slaOptions");
+      for (final Object slaObj : slaOptionsObj) {
+        slaOptions.add(SlaOption.fromObject(slaObj));
+      }
+    }
+    return new ExecuteFlowAction(actionId, projectId, projectName, flowName,
+        submitUser, executionOptions, slaOptions);
   }
 
-  public ExecutionOptions getExecutionOptions() {
-    return executionOptions;
+  public String getProjectName() {
+    return this.projectName;
   }
 
-  protected void setExecutionOptions(ExecutionOptions executionOptions) {
-    this.executionOptions = executionOptions;
+  public int getProjectId() {
+    return this.projectId;
   }
 
-  public List<SlaOption> getSlaOptions() {
-    return slaOptions;
+  protected void setProjectId(final int projectId) {
+    this.projectId = projectId;
   }
 
-  protected void setSlaOptions(List<SlaOption> slaOptions) {
-    this.slaOptions = slaOptions;
+  public String getFlowName() {
+    return this.flowName;
   }
 
-  public static ExecutorManagerAdapter getExecutorManager() {
-    return executorManager;
+  protected void setFlowName(final String flowName) {
+    this.flowName = flowName;
   }
 
-  public static void setExecutorManager(ExecutorManagerAdapter executorManager) {
-    ExecuteFlowAction.executorManager = executorManager;
+  public String getSubmitUser() {
+    return this.submitUser;
   }
 
-  public static TriggerManager getTriggerManager() {
-    return triggerManager;
+  protected void setSubmitUser(final String submitUser) {
+    this.submitUser = submitUser;
   }
 
-  public static void setTriggerManager(TriggerManager triggerManager) {
-    ExecuteFlowAction.triggerManager = triggerManager;
+  public ExecutionOptions getExecutionOptions() {
+    return this.executionOptions;
   }
 
-  public static ProjectManager getProjectManager() {
-    return projectManager;
+  protected void setExecutionOptions(final ExecutionOptions executionOptions) {
+    this.executionOptions = executionOptions;
   }
 
-  public static void setProjectManager(ProjectManager projectManager) {
-    ExecuteFlowAction.projectManager = projectManager;
+  public List<SlaOption> getSlaOptions() {
+    return this.slaOptions;
+  }
+
+  protected void setSlaOptions(final List<SlaOption> slaOptions) {
+    this.slaOptions = slaOptions;
   }
 
   @Override
@@ -145,57 +168,26 @@ public class ExecuteFlowAction implements TriggerAction {
     return type;
   }
 
-  @SuppressWarnings("unchecked")
   @Override
-  public TriggerAction fromJson(Object obj) {
+  public TriggerAction fromJson(final Object obj) {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
-  @SuppressWarnings("unchecked")
-  public static TriggerAction createFromJson(HashMap<String, Object> obj) {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    String objType = (String) jsonObj.get("type");
-    if (!objType.equals(type)) {
-      throw new RuntimeException("Cannot create action of " + type + " from "
-          + objType);
-    }
-    String actionId = (String) jsonObj.get("actionId");
-    int projectId = Integer.valueOf((String) jsonObj.get("projectId"));
-    String projectName = (String) jsonObj.get("projectName");
-    String flowName = (String) jsonObj.get("flowName");
-    String submitUser = (String) jsonObj.get("submitUser");
-    ExecutionOptions executionOptions = null;
-    if (jsonObj.containsKey("executionOptions")) {
-      executionOptions =
-          ExecutionOptions.createFromObject(jsonObj.get("executionOptions"));
-    }
-    List<SlaOption> slaOptions = null;
-    if (jsonObj.containsKey("slaOptions")) {
-      slaOptions = new ArrayList<SlaOption>();
-      List<Object> slaOptionsObj = (List<Object>) jsonObj.get("slaOptions");
-      for (Object slaObj : slaOptionsObj) {
-        slaOptions.add(SlaOption.fromObject(slaObj));
-      }
-    }
-    return new ExecuteFlowAction(actionId, projectId, projectName, flowName,
-        submitUser, executionOptions, slaOptions);
-  }
-
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("actionId", actionId);
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("actionId", this.actionId);
     jsonObj.put("type", type);
-    jsonObj.put("projectId", String.valueOf(projectId));
-    jsonObj.put("projectName", projectName);
-    jsonObj.put("flowName", flowName);
-    jsonObj.put("submitUser", submitUser);
-    if (executionOptions != null) {
-      jsonObj.put("executionOptions", executionOptions.toObject());
+    jsonObj.put("projectId", String.valueOf(this.projectId));
+    jsonObj.put("projectName", this.projectName);
+    jsonObj.put("flowName", this.flowName);
+    jsonObj.put("submitUser", this.submitUser);
+    if (this.executionOptions != null) {
+      jsonObj.put("executionOptions", this.executionOptions.toObject());
     }
-    if (slaOptions != null) {
-      List<Object> slaOptionsObj = new ArrayList<Object>();
-      for (SlaOption sla : slaOptions) {
+    if (this.slaOptions != null) {
+      final List<Object> slaOptionsObj = new ArrayList<>();
+      for (final SlaOption sla : this.slaOptions) {
         slaOptionsObj.add(sla.toObject());
       }
       jsonObj.put("slaOptions", slaOptionsObj);
@@ -209,45 +201,45 @@ public class ExecuteFlowAction implements TriggerAction {
       throw new Exception("ExecuteFlowAction not properly initialized!");
     }
 
-    Project project = projectManager.getProject(projectId);
+    final Project project = projectManager.getProject(this.projectId);
     if (project == null) {
-      logger.error("Project to execute " + projectId + " does not exist!");
+      logger.error("Project to execute " + this.projectId + " does not exist!");
       throw new RuntimeException("Error finding the project to execute "
-          + projectId);
+          + this.projectId);
     }
 
-    Flow flow = project.getFlow(flowName);
+    final Flow flow = project.getFlow(this.flowName);
     if (flow == null) {
-      logger.error("Flow " + flowName + " cannot be found in project "
+      logger.error("Flow " + this.flowName + " cannot be found in project "
           + project.getName());
       throw new RuntimeException("Error finding the flow to execute "
-          + flowName);
+          + this.flowName);
     }
 
-    ExecutableFlow exflow = new ExecutableFlow(project, flow);
-    exflow.setSubmitUser(submitUser);
+    final ExecutableFlow exflow = new ExecutableFlow(project, flow);
+    exflow.setSubmitUser(this.submitUser);
     exflow.addAllProxyUsers(project.getProxyUsers());
 
-    if (executionOptions == null) {
-      executionOptions = new ExecutionOptions();
+    if (this.executionOptions == null) {
+      this.executionOptions = new ExecutionOptions();
     }
-    if (!executionOptions.isFailureEmailsOverridden()) {
-      executionOptions.setFailureEmails(flow.getFailureEmails());
+    if (!this.executionOptions.isFailureEmailsOverridden()) {
+      this.executionOptions.setFailureEmails(flow.getFailureEmails());
     }
-    if (!executionOptions.isSuccessEmailsOverridden()) {
-      executionOptions.setSuccessEmails(flow.getSuccessEmails());
+    if (!this.executionOptions.isSuccessEmailsOverridden()) {
+      this.executionOptions.setSuccessEmails(flow.getSuccessEmails());
     }
-    exflow.setExecutionOptions(executionOptions);
+    exflow.setExecutionOptions(this.executionOptions);
 
-    if (slaOptions != null && slaOptions.size() > 0) {
-      exflow.setSlaOptions(slaOptions);
+    if (this.slaOptions != null && this.slaOptions.size() > 0) {
+      exflow.setSlaOptions(this.slaOptions);
     }
 
     try {
-      logger.info("Invoking flow " + project.getName() + "." + flowName);
-      executorManager.submitExecutableFlow(exflow, submitUser);
-      logger.info("Invoked flow " + project.getName() + "." + flowName);
-    } catch (ExecutorManagerException e) {
+      logger.info("Invoking flow " + project.getName() + "." + this.flowName);
+      executorManager.submitExecutableFlow(exflow, this.submitUser);
+      logger.info("Invoked flow " + project.getName() + "." + this.flowName);
+    } catch (final ExecutorManagerException e) {
       throw new RuntimeException(e);
     }
   }
@@ -259,12 +251,12 @@ public class ExecuteFlowAction implements TriggerAction {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
   public String getId() {
-    return actionId;
+    return this.actionId;
   }
 
 }
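
One detail worth noting in the relocated createFromJson above: numeric fields such as projectId travel through the JSON map as strings (String.valueOf on the way out, Integer.valueOf on the way back in). A hedged round-trip sketch with placeholder project and flow names; null executionOptions and slaOptions are simply omitted by toJson().

    import java.util.HashMap;
    import azkaban.trigger.TriggerAction;
    import azkaban.trigger.builtin.ExecuteFlowAction;

    public class ExecuteFlowActionRoundTrip {

      static TriggerAction roundTrip() {
        // Placeholder values; both option arguments are left null here.
        ExecuteFlowAction action = new ExecuteFlowAction(
            "action-1", 42, "my_project", "my_flow", "azkaban_user", null, null);

        @SuppressWarnings("unchecked")
        HashMap<String, Object> json = (HashMap<String, Object>) action.toJson();

        // "projectId" is stored as the string "42" in this map, not the int 42.
        return ExecuteFlowAction.createFromJson(json);
      }
    }
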
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
index 8dcb37d..30a021c 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/ExecutionChecker.java
@@ -16,56 +16,72 @@
 
 package azkaban.trigger.builtin;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableNode;
 import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.executor.Status;
 import azkaban.trigger.ConditionChecker;
+import java.util.HashMap;
+import java.util.Map;
 
 public class ExecutionChecker implements ConditionChecker {
 
   public static final String type = "ExecutionChecker";
   public static ExecutorManagerAdapter executorManager;
 
-  private String checkerId;
-  private int execId;
-  private String jobName;
-  private Status wantedStatus;
+  private final String checkerId;
+  private final int execId;
+  private final String jobName;
+  private final Status wantedStatus;
 
-  public ExecutionChecker(String checkerId, int execId, String jobName,
-      Status wantedStatus) {
+  public ExecutionChecker(final String checkerId, final int execId, final String jobName,
+      final Status wantedStatus) {
     this.checkerId = checkerId;
     this.execId = execId;
     this.jobName = jobName;
     this.wantedStatus = wantedStatus;
   }
 
-  public static void setExecutorManager(ExecutorManagerAdapter em) {
+  public static void setExecutorManager(final ExecutorManagerAdapter em) {
     executorManager = em;
   }
 
+  public static ExecutionChecker createFromJson(final HashMap<String, Object> jsonObj)
+      throws Exception {
+    if (!jsonObj.get("type").equals(type)) {
+      throw new Exception("Cannot create checker of " + type + " from "
+          + jsonObj.get("type"));
+    }
+    final int execId = Integer.valueOf((String) jsonObj.get("execId"));
+    String jobName = null;
+    if (jsonObj.containsKey("jobName")) {
+      jobName = (String) jsonObj.get("jobName");
+    }
+    final String checkerId = (String) jsonObj.get("checkerId");
+    final Status wantedStatus = Status.valueOf((String) jsonObj.get("wantedStatus"));
+
+    return new ExecutionChecker(checkerId, execId, jobName, wantedStatus);
+  }
+
   @Override
   public Object eval() {
-    ExecutableFlow exflow;
+    final ExecutableFlow exflow;
     try {
-      exflow = executorManager.getExecutableFlow(execId);
-    } catch (ExecutorManagerException e) {
+      exflow = executorManager.getExecutableFlow(this.execId);
+    } catch (final ExecutorManagerException e) {
       e.printStackTrace();
       return Boolean.FALSE;
     }
-    if (jobName != null) {
-      ExecutableNode job = exflow.getExecutableNode(jobName);
+    if (this.jobName != null) {
+      final ExecutableNode job = exflow.getExecutableNode(this.jobName);
       if (job != null) {
-        return job.getStatus().equals(wantedStatus);
+        return job.getStatus().equals(this.wantedStatus);
       } else {
         return Boolean.FALSE;
       }
     } else {
-      return exflow.getStatus().equals(wantedStatus);
+      return exflow.getStatus().equals(this.wantedStatus);
     }
 
   }
@@ -81,7 +97,7 @@ public class ExecutionChecker implements ConditionChecker {
 
   @Override
   public String getId() {
-    return checkerId;
+    return this.checkerId;
   }
 
   @Override
@@ -89,39 +105,21 @@ public class ExecutionChecker implements ConditionChecker {
     return type;
   }
 
-  public static ExecutionChecker createFromJson(HashMap<String, Object> jsonObj)
-      throws Exception {
-    if (!jsonObj.get("type").equals(type)) {
-      throw new Exception("Cannot create checker of " + type + " from "
-          + jsonObj.get("type"));
-    }
-    int execId = Integer.valueOf((String) jsonObj.get("execId"));
-    String jobName = null;
-    if (jsonObj.containsKey("jobName")) {
-      jobName = (String) jsonObj.get("jobName");
-    }
-    String checkerId = (String) jsonObj.get("checkerId");
-    Status wantedStatus = Status.valueOf((String) jsonObj.get("wantedStatus"));
-
-    return new ExecutionChecker(checkerId, execId, jobName, wantedStatus);
-  }
-
-  @SuppressWarnings("unchecked")
   @Override
-  public ConditionChecker fromJson(Object obj) throws Exception {
+  public ConditionChecker fromJson(final Object obj) throws Exception {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
+    final Map<String, Object> jsonObj = new HashMap<>();
     jsonObj.put("type", type);
-    jsonObj.put("execId", String.valueOf(execId));
-    if (jobName != null) {
-      jsonObj.put("jobName", jobName);
+    jsonObj.put("execId", String.valueOf(this.execId));
+    if (this.jobName != null) {
+      jsonObj.put("jobName", this.jobName);
     }
-    jsonObj.put("wantedStatus", wantedStatus.toString());
-    jsonObj.put("checkerId", checkerId);
+    jsonObj.put("wantedStatus", this.wantedStatus.toString());
+    jsonObj.put("checkerId", this.checkerId);
     return jsonObj;
   }
 
@@ -130,7 +128,7 @@ public class ExecutionChecker implements ConditionChecker {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
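
As rearranged above, ExecutionChecker.eval() compares either the whole flow's status or a single job's status against wantedStatus, and falls back to Boolean.FALSE when the execution or the named job cannot be found. A hedged construction sketch; the checker id, execution id and job name are placeholders, and the executor manager is assumed to come from the server's own wiring.

    import azkaban.executor.ExecutorManagerAdapter;
    import azkaban.executor.Status;
    import azkaban.trigger.builtin.ExecutionChecker;

    public class ExecutionCheckerSketch {

      static Object checkJobSucceeded(ExecutorManagerAdapter em) {
        // The static executor manager must be set before eval() can fetch flows.
        ExecutionChecker.setExecutorManager(em);

        // Returns Boolean.TRUE once job "load_data" in execution 1234 reaches
        // SUCCEEDED; a null jobName would compare the whole flow's status instead.
        ExecutionChecker checker =
            new ExecutionChecker("checker-1", 1234, "load_data", Status.SUCCEEDED);
        return checker.eval();
      }
    }
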
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
index 01d9537..919ecc4 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/KillExecutionAction.java
@@ -17,21 +17,18 @@
 package azkaban.trigger.builtin;
 
 import azkaban.Constants;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutorManagerAdapter;
 import azkaban.executor.Status;
 import azkaban.trigger.TriggerAction;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.log4j.Logger;
 
 /**
- * @deprecated Create a new KillExecutionAction using FlowRunnerManager
- * instead of ExecutorManager to kill flow. Still keep the old one here
- * for being compatible with existing SLA trigger in the database.
- * Will remove the old one when all existing triggers expire.
+ * @deprecated Create a new KillExecutionAction using FlowRunnerManager instead of ExecutorManager
+ * to kill flow. Still keep the old one here for being compatible with existing SLA trigger in the
+ * database. Will remove the old one when all existing triggers expire.
  */
 
 @Deprecated
@@ -41,80 +38,77 @@ public class KillExecutionAction implements TriggerAction {
 
   private static final Logger logger = Logger
       .getLogger(KillExecutionAction.class);
-
-  private String actionId;
-  private int execId;
   private static ExecutorManagerAdapter executorManager;
+  private final String actionId;
+  private final int execId;
 
   //todo chengren311: delete this class to executor module when all existing triggers in db are expired
-  public KillExecutionAction(String actionId, int execId) {
+  public KillExecutionAction(final String actionId, final int execId) {
     this.execId = execId;
     this.actionId = actionId;
   }
 
-  public static void setExecutorManager(ExecutorManagerAdapter em) {
+  public static void setExecutorManager(final ExecutorManagerAdapter em) {
     executorManager = em;
   }
 
-  @Override
-  public String getId() {
-    return actionId;
-  }
-
-  @Override
-  public String getType() {
-    return type;
-  }
-
-  @SuppressWarnings("unchecked")
-  public static KillExecutionAction createFromJson(Object obj) {
+  public static KillExecutionAction createFromJson(final Object obj) {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
-  public static KillExecutionAction createFromJson(HashMap<String, Object> obj) {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    String objType = (String) jsonObj.get("type");
+  public static KillExecutionAction createFromJson(final HashMap<String, Object> obj) {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    final String objType = (String) jsonObj.get("type");
     if (!objType.equals(type)) {
       throw new RuntimeException("Cannot create action of " + type + " from "
           + objType);
     }
-    String actionId = (String) jsonObj.get("actionId");
-    int execId = Integer.valueOf((String) jsonObj.get("execId"));
+    final String actionId = (String) jsonObj.get("actionId");
+    final int execId = Integer.valueOf((String) jsonObj.get("execId"));
     return new KillExecutionAction(actionId, execId);
   }
 
-  @SuppressWarnings("unchecked")
   @Override
-  public KillExecutionAction fromJson(Object obj) throws Exception {
+  public String getId() {
+    return this.actionId;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public KillExecutionAction fromJson(final Object obj) throws Exception {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("actionId", actionId);
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("actionId", this.actionId);
     jsonObj.put("type", type);
-    jsonObj.put("execId", String.valueOf(execId));
+    jsonObj.put("execId", String.valueOf(this.execId));
     return jsonObj;
   }
 
   @Override
   public void doAction() throws Exception {
-    ExecutableFlow exFlow = executorManager.getExecutableFlow(execId);
-    logger.info("ready to kill execution " + execId);
+    final ExecutableFlow exFlow = executorManager.getExecutableFlow(this.execId);
+    logger.info("ready to kill execution " + this.execId);
     if (!Status.isStatusFinished(exFlow.getStatus())) {
-      logger.info("Killing execution " + execId);
+      logger.info("Killing execution " + this.execId);
       executorManager.cancelFlow(exFlow, Constants.AZKABAN_SLA_CHECKER_USERNAME);
     }
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
   public String getDescription() {
-    return type + " for " + execId;
+    return type + " for " + this.execId;
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
index b300e18..db9dbe0 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaAlertAction.java
@@ -16,18 +16,16 @@
 
 package azkaban.trigger.builtin;
 
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
+import azkaban.ServiceProvider;
 import azkaban.alert.Alerter;
+import azkaban.executor.AlerterHolder;
 import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutorLoader;
 import azkaban.sla.SlaOption;
 import azkaban.trigger.TriggerAction;
-import azkaban.ServiceProvider;
-import azkaban.executor.AlerterHolder;
-import azkaban.executor.ExecutorLoader;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.log4j.Logger;
 
 public class SlaAlertAction implements TriggerAction {
 
@@ -42,7 +40,7 @@ public class SlaAlertAction implements TriggerAction {
   private final ExecutorLoader executorLoader;
 
   //todo chengren311: move this class to executor module when all existing triggers in db are expired
-  public SlaAlertAction(String id, SlaOption slaOption, int execId) {
+  public SlaAlertAction(final String id, final SlaOption slaOption, final int execId) {
     this.actionId = id;
     this.slaOption = slaOption;
     this.execId = execId;
@@ -50,46 +48,45 @@ public class SlaAlertAction implements TriggerAction {
     this.executorLoader = ServiceProvider.SERVICE_PROVIDER.getInstance(ExecutorLoader.class);
   }
 
-  @Override
-  public String getId() {
-    return actionId;
-  }
-
-  @Override
-  public String getType() {
-    return type;
-  }
-
-  @SuppressWarnings("unchecked")
-  public static SlaAlertAction createFromJson(Object obj) throws Exception {
+  public static SlaAlertAction createFromJson(final Object obj) throws Exception {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
-  public static SlaAlertAction createFromJson(HashMap<String, Object> obj)
+  public static SlaAlertAction createFromJson(final HashMap<String, Object> obj)
       throws Exception {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
     if (!jsonObj.get("type").equals(type)) {
       throw new Exception("Cannot create action of " + type + " from "
           + jsonObj.get("type"));
     }
-    String actionId = (String) jsonObj.get("actionId");
-    SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
-    int execId = Integer.valueOf((String) jsonObj.get("execId"));
+    final String actionId = (String) jsonObj.get("actionId");
+    final SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+    final int execId = Integer.valueOf((String) jsonObj.get("execId"));
     return new SlaAlertAction(actionId, slaOption, execId);
   }
 
   @Override
-  public TriggerAction fromJson(Object obj) throws Exception {
+  public String getId() {
+    return this.actionId;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public TriggerAction fromJson(final Object obj) throws Exception {
     return createFromJson(obj);
   }
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("actionId", actionId);
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("actionId", this.actionId);
     jsonObj.put("type", type);
-    jsonObj.put("slaOption", slaOption.toObject());
-    jsonObj.put("execId", String.valueOf(execId));
+    jsonObj.put("slaOption", this.slaOption.toObject());
+    jsonObj.put("execId", String.valueOf(this.execId));
 
     return jsonObj;
   }
@@ -97,15 +94,15 @@ public class SlaAlertAction implements TriggerAction {
   @Override
   public void doAction() throws Exception {
     logger.info("Alerting on sla failure.");
-    Map<String, Object> alert = slaOption.getInfo();
+    final Map<String, Object> alert = this.slaOption.getInfo();
     if (alert.containsKey(SlaOption.ALERT_TYPE)) {
-      String alertType = (String) alert.get(SlaOption.ALERT_TYPE);
-      Alerter alerter = alerters.get(alertType);
+      final String alertType = (String) alert.get(SlaOption.ALERT_TYPE);
+      final Alerter alerter = this.alerters.get(alertType);
       if (alerter != null) {
         try {
-          ExecutableFlow flow = executorLoader.fetchExecutableFlow(execId);
-          alerter.alertOnSla(slaOption, SlaOption.createSlaMessage(slaOption, flow));
-        } catch (Exception e) {
+          final ExecutableFlow flow = this.executorLoader.fetchExecutableFlow(this.execId);
+          alerter.alertOnSla(this.slaOption, SlaOption.createSlaMessage(this.slaOption, flow));
+        } catch (final Exception e) {
           e.printStackTrace();
           logger.error("Failed to alert by " + alertType);
         }
@@ -117,12 +114,12 @@ public class SlaAlertAction implements TriggerAction {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
   public String getDescription() {
-    return type + " for " + execId + " with " + slaOption.toString();
+    return type + " for " + this.execId + " with " + this.slaOption.toString();
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaChecker.java b/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaChecker.java
index d65b273..5f17d65 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaChecker.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/builtin/SlaChecker.java
@@ -17,111 +17,125 @@
 package azkaban.trigger.builtin;
 
 import azkaban.ServiceProvider;
-import azkaban.executor.ExecutorLoader;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-import org.joda.time.DateTime;
-import org.joda.time.ReadablePeriod;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableNode;
+import azkaban.executor.ExecutorLoader;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.executor.Status;
 import azkaban.sla.SlaOption;
 import azkaban.trigger.ConditionChecker;
 import azkaban.utils.Utils;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.log4j.Logger;
+import org.joda.time.DateTime;
+import org.joda.time.ReadablePeriod;
 
 public class SlaChecker implements ConditionChecker {
 
-  private static final Logger logger = Logger.getLogger(SlaChecker.class);
   public static final String type = "SlaChecker";
-
+  private static final Logger logger = Logger.getLogger(SlaChecker.class);
   private final String id;
   private final SlaOption slaOption;
   private final int execId;
-  private long checkTime = -1;
   private final ExecutorLoader executorLoader;
+  private long checkTime = -1;
 
   //todo chengren311: move this class to executor module when all existing triggers in db are expired
-  public SlaChecker(String id, SlaOption slaOption, int execId) {
+  public SlaChecker(final String id, final SlaOption slaOption, final int execId) {
     this.id = id;
     this.slaOption = slaOption;
     this.execId = execId;
     this.executorLoader = ServiceProvider.SERVICE_PROVIDER.getInstance(ExecutorLoader.class);
   }
 
-  private Boolean isSlaMissed(ExecutableFlow flow) {
-    String type = slaOption.getType();
+  public static SlaChecker createFromJson(final Object obj) throws Exception {
+    return createFromJson((HashMap<String, Object>) obj);
+  }
+
+  public static SlaChecker createFromJson(final HashMap<String, Object> obj)
+      throws Exception {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    if (!jsonObj.get("type").equals(type)) {
+      throw new Exception("Cannot create checker of " + type + " from "
+          + jsonObj.get("type"));
+    }
+    final String id = (String) jsonObj.get("id");
+    final SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
+    final int execId = Integer.valueOf((String) jsonObj.get("execId"));
+    return new SlaChecker(id, slaOption, execId);
+  }
+
+  private Boolean isSlaMissed(final ExecutableFlow flow) {
+    final String type = this.slaOption.getType();
     if (flow.getStartTime() < 0) {
       return Boolean.FALSE;
     }
-    Status status;
+    final Status status;
     if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
-      if (checkTime < flow.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < flow.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(flow.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(flow.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = flow.getStatus();
-      if (checkTime < DateTime.now().getMillis()) {
+      if (this.checkTime < DateTime.now().getMillis()) {
         return !isFlowFinished(status);
       }
     } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
-      if (checkTime < flow.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < flow.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(flow.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(flow.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = flow.getStatus();
-      if (checkTime < DateTime.now().getMillis()) {
+      if (this.checkTime < DateTime.now().getMillis()) {
         return !isFlowSucceeded(status);
       } else {
         return status.equals(Status.FAILED) || status.equals(Status.KILLED);
       }
     } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      ExecutableNode node = flow.getExecutableNode(jobName);
+      final String jobName =
+          (String) this.slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final ExecutableNode node = flow.getExecutableNode(jobName);
       if (node.getStartTime() < 0) {
         return Boolean.FALSE;
       }
-      if (checkTime < node.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < node.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(node.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(node.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = node.getStatus();
-      if (checkTime < DateTime.now().getMillis()) {
+      if (this.checkTime < DateTime.now().getMillis()) {
         return !isJobFinished(status);
       }
     } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      ExecutableNode node = flow.getExecutableNode(jobName);
+      final String jobName =
+          (String) this.slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final ExecutableNode node = flow.getExecutableNode(jobName);
       if (node.getStartTime() < 0) {
         return Boolean.FALSE;
       }
-      if (checkTime < node.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < node.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(node.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(node.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = node.getStatus();
-      if (checkTime < DateTime.now().getMillis()) {
+      if (this.checkTime < DateTime.now().getMillis()) {
         return !isJobFinished(status);
       } else {
         return status.equals(Status.FAILED) || status.equals(Status.KILLED);
@@ -130,64 +144,64 @@ public class SlaChecker implements ConditionChecker {
     return Boolean.FALSE;
   }
 
-  private Boolean isSlaGood(ExecutableFlow flow) {
-    String type = slaOption.getType();
+  private Boolean isSlaGood(final ExecutableFlow flow) {
+    final String type = this.slaOption.getType();
     if (flow.getStartTime() < 0) {
       return Boolean.FALSE;
     }
-    Status status;
+    final Status status;
     if (type.equals(SlaOption.TYPE_FLOW_FINISH)) {
-      if (checkTime < flow.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < flow.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(flow.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(flow.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = flow.getStatus();
       return isFlowFinished(status);
     } else if (type.equals(SlaOption.TYPE_FLOW_SUCCEED)) {
-      if (checkTime < flow.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < flow.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(flow.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(flow.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = flow.getStatus();
       return isFlowSucceeded(status);
     } else if (type.equals(SlaOption.TYPE_JOB_FINISH)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      ExecutableNode node = flow.getExecutableNode(jobName);
+      final String jobName =
+          (String) this.slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final ExecutableNode node = flow.getExecutableNode(jobName);
       if (node.getStartTime() < 0) {
         return Boolean.FALSE;
       }
-      if (checkTime < node.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < node.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(node.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(node.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = node.getStatus();
       return isJobFinished(status);
     } else if (type.equals(SlaOption.TYPE_JOB_SUCCEED)) {
-      String jobName =
-          (String) slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
-      ExecutableNode node = flow.getExecutableNode(jobName);
+      final String jobName =
+          (String) this.slaOption.getInfo().get(SlaOption.INFO_JOB_NAME);
+      final ExecutableNode node = flow.getExecutableNode(jobName);
       if (node.getStartTime() < 0) {
         return Boolean.FALSE;
       }
-      if (checkTime < node.getStartTime()) {
-        ReadablePeriod duration =
-            Utils.parsePeriodString((String) slaOption.getInfo().get(
+      if (this.checkTime < node.getStartTime()) {
+        final ReadablePeriod duration =
+            Utils.parsePeriodString((String) this.slaOption.getInfo().get(
                 SlaOption.INFO_DURATION));
-        DateTime startTime = new DateTime(node.getStartTime());
-        DateTime nextCheckTime = startTime.plus(duration);
+        final DateTime startTime = new DateTime(node.getStartTime());
+        final DateTime nextCheckTime = startTime.plus(duration);
         this.checkTime = nextCheckTime.getMillis();
       }
       status = node.getStatus();
@@ -199,11 +213,11 @@ public class SlaChecker implements ConditionChecker {
   // return true to trigger sla action
   @Override
   public Object eval() {
-    logger.info("Checking sla for execution " + execId);
-    ExecutableFlow flow;
+    logger.info("Checking sla for execution " + this.execId);
+    final ExecutableFlow flow;
     try {
-      flow = executorLoader.fetchExecutableFlow(execId);
-    } catch (ExecutorManagerException e) {
+      flow = this.executorLoader.fetchExecutableFlow(this.execId);
+    } catch (final ExecutorManagerException e) {
       logger.error("Can't get executable flow.", e);
       e.printStackTrace();
       // something wrong, send out alerts
@@ -213,10 +227,10 @@ public class SlaChecker implements ConditionChecker {
   }
 
   public Object isSlaFailed() {
-    ExecutableFlow flow;
+    final ExecutableFlow flow;
     try {
-      flow = executorLoader.fetchExecutableFlow(execId);
-    } catch (ExecutorManagerException e) {
+      flow = this.executorLoader.fetchExecutableFlow(this.execId);
+    } catch (final ExecutorManagerException e) {
       logger.error("Can't get executable flow.", e);
       // something wrong, send out alerts
       return Boolean.TRUE;
@@ -225,10 +239,10 @@ public class SlaChecker implements ConditionChecker {
   }
 
   public Object isSlaPassed() {
-    ExecutableFlow flow;
+    final ExecutableFlow flow;
     try {
-      flow = executorLoader.fetchExecutableFlow(execId);
-    } catch (ExecutorManagerException e) {
+      flow = this.executorLoader.fetchExecutableFlow(this.execId);
+    } catch (final ExecutorManagerException e) {
       logger.error("Can't get executable flow.", e);
       // something wrong, send out alerts
       return Boolean.TRUE;
@@ -247,7 +261,7 @@ public class SlaChecker implements ConditionChecker {
 
   @Override
   public String getId() {
-    return id;
+    return this.id;
   }
 
   @Override
@@ -256,35 +270,17 @@ public class SlaChecker implements ConditionChecker {
   }
 
   @Override
-  public ConditionChecker fromJson(Object obj) throws Exception {
+  public ConditionChecker fromJson(final Object obj) throws Exception {
     return createFromJson(obj);
   }
 
-  @SuppressWarnings("unchecked")
-  public static SlaChecker createFromJson(Object obj) throws Exception {
-    return createFromJson((HashMap<String, Object>) obj);
-  }
-
-  public static SlaChecker createFromJson(HashMap<String, Object> obj)
-      throws Exception {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    if (!jsonObj.get("type").equals(type)) {
-      throw new Exception("Cannot create checker of " + type + " from "
-          + jsonObj.get("type"));
-    }
-    String id = (String) jsonObj.get("id");
-    SlaOption slaOption = SlaOption.fromObject(jsonObj.get("slaOption"));
-    int execId = Integer.valueOf((String) jsonObj.get("execId"));
-    return new SlaChecker(id, slaOption, execId);
-  }
-
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
+    final Map<String, Object> jsonObj = new HashMap<>();
     jsonObj.put("type", type);
-    jsonObj.put("id", id);
-    jsonObj.put("slaOption", slaOption.toObject());
-    jsonObj.put("execId", String.valueOf(execId));
+    jsonObj.put("id", this.id);
+    jsonObj.put("slaOption", this.slaOption.toObject());
+    jsonObj.put("execId", String.valueOf(this.execId));
 
     return jsonObj;
   }
@@ -295,15 +291,15 @@ public class SlaChecker implements ConditionChecker {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
   public long getNextCheckTime() {
-    return checkTime;
+    return this.checkTime;
   }
 
-  private boolean isFlowFinished(Status status) {
+  private boolean isFlowFinished(final Status status) {
     if (status.equals(Status.FAILED) || status.equals(Status.KILLED)
         || status.equals(Status.SUCCEEDED)) {
       return Boolean.TRUE;
@@ -312,11 +308,11 @@ public class SlaChecker implements ConditionChecker {
     }
   }
 
-  private boolean isFlowSucceeded(Status status) {
+  private boolean isFlowSucceeded(final Status status) {
     return status.equals(Status.SUCCEEDED);
   }
 
-  private boolean isJobFinished(Status status) {
+  private boolean isJobFinished(final Status status) {
     if (status.equals(Status.FAILED) || status.equals(Status.KILLED)
         || status.equals(Status.SUCCEEDED)) {
       return Boolean.TRUE;
@@ -325,7 +321,7 @@ public class SlaChecker implements ConditionChecker {
     }
   }
 
-  private boolean isJobSucceeded(Status status) {
+  private boolean isJobSucceeded(final Status status) {
     return status.equals(Status.SUCCEEDED);
   }
 }
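
The repeated block in isSlaMissed()/isSlaGood() above derives checkTime by adding the SLA duration (parsed from the SlaOption info map via Utils.parsePeriodString) to the flow or job start time, then treats the SLA as missed only if that deadline has passed without the wanted status. A standalone Joda-Time sketch of that arithmetic, with a hard-coded 30-minute period standing in for the parsed SLA option.

    import org.joda.time.DateTime;
    import org.joda.time.Minutes;
    import org.joda.time.ReadablePeriod;

    public class SlaCheckTimeSketch {

      public static void main(String[] args) {
        long flowStartTime = System.currentTimeMillis();

        // Stand-in for the duration SlaChecker parses with Utils.parsePeriodString.
        ReadablePeriod duration = Minutes.minutes(30);

        DateTime startTime = new DateTime(flowStartTime);
        long checkTime = startTime.plus(duration).getMillis();

        // Missed only once 'now' is past checkTime and the flow or job has
        // still not reached a finished (or succeeded) status.
        boolean windowElapsed = checkTime < DateTime.now().getMillis();
        System.out.println("deadline=" + new DateTime(checkTime)
            + " elapsed=" + windowElapsed);
      }
    }
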
diff --git a/azkaban-common/src/main/java/azkaban/trigger/CheckerTypeLoader.java b/azkaban-common/src/main/java/azkaban/trigger/CheckerTypeLoader.java
index 664e2fd..30fe9f8 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/CheckerTypeLoader.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/CheckerTypeLoader.java
@@ -16,47 +16,43 @@
 
 package azkaban.trigger;
 
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
 import java.util.HashMap;
 import java.util.Map;
-
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-import azkaban.utils.Utils;
-
 public class CheckerTypeLoader {
 
-  private static Logger logger = Logger.getLogger(CheckerTypeLoader.class);
-
   public static final String DEFAULT_CONDITION_CHECKER_PLUGIN_DIR =
       "plugins/conditioncheckers";
-
+  private static final Logger logger = Logger.getLogger(CheckerTypeLoader.class);
   protected static Map<String, Class<? extends ConditionChecker>> checkerToClass =
-      new HashMap<String, Class<? extends ConditionChecker>>();
+      new HashMap<>();
+
+  public static void registerBuiltinCheckers(
+      final Map<String, Class<? extends ConditionChecker>> builtinCheckers) {
+    checkerToClass.putAll(checkerToClass);
+    for (final String type : builtinCheckers.keySet()) {
+      logger.info("Loaded " + type + " checker.");
+    }
+  }
 
-  public void init(Props props) throws TriggerException {
+  public void init(final Props props) throws TriggerException {
   }
 
-  public synchronized void registerCheckerType(String type,
-      Class<? extends ConditionChecker> checkerClass) {
+  public synchronized void registerCheckerType(final String type,
+      final Class<? extends ConditionChecker> checkerClass) {
     logger.info("Registering checker " + type);
     if (!checkerToClass.containsKey(type)) {
       checkerToClass.put(type, checkerClass);
     }
   }
 
-  public static void registerBuiltinCheckers(
-      Map<String, Class<? extends ConditionChecker>> builtinCheckers) {
-    checkerToClass.putAll(checkerToClass);
-    for (String type : builtinCheckers.keySet()) {
-      logger.info("Loaded " + type + " checker.");
-    }
-  }
-
-  public ConditionChecker createCheckerFromJson(String type, Object obj)
+  public ConditionChecker createCheckerFromJson(final String type, final Object obj)
       throws Exception {
     ConditionChecker checker = null;
-    Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
+    final Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
     if (checkerClass == null) {
       throw new Exception("Checker type " + type + " not supported!");
     }
@@ -68,9 +64,9 @@ public class CheckerTypeLoader {
     return checker;
   }
 
-  public ConditionChecker createChecker(String type, Object... args) {
+  public ConditionChecker createChecker(final String type, final Object... args) {
     ConditionChecker checker = null;
-    Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
+    final Class<? extends ConditionChecker> checkerClass = checkerToClass.get(type);
     checker = (ConditionChecker) Utils.callConstructor(checkerClass, args);
 
     return checker;
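
The loader above keeps a static map from checker type name to class; createChecker() then instantiates the registered class reflectively through Utils.callConstructor with whatever arguments the caller supplies. A hedged usage sketch reusing ExecutionChecker's constructor signature from earlier in this patch; it assumes the reflective lookup matches the boxed Integer argument to the int parameter.

    import azkaban.executor.Status;
    import azkaban.trigger.CheckerTypeLoader;
    import azkaban.trigger.ConditionChecker;
    import azkaban.trigger.builtin.ExecutionChecker;

    public class CheckerTypeLoaderSketch {

      static ConditionChecker build() {
        CheckerTypeLoader loader = new CheckerTypeLoader();
        loader.registerCheckerType(ExecutionChecker.type, ExecutionChecker.class);

        // Arguments must line up with ExecutionChecker(String, int, String, Status);
        // Utils.callConstructor resolves the constructor at runtime.
        return loader.createChecker(ExecutionChecker.type,
            "checker-1", 1234, "load_data", Status.SUCCEEDED);
      }
    }
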
diff --git a/azkaban-common/src/main/java/azkaban/trigger/Condition.java b/azkaban-common/src/main/java/azkaban/trigger/Condition.java
index 5e8e7b6..4490721 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/Condition.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/Condition.java
@@ -20,7 +20,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.commons.jexl2.Expression;
 import org.apache.commons.jexl2.JexlEngine;
 import org.apache.commons.jexl2.MapContext;
@@ -29,152 +28,151 @@ import org.joda.time.DateTime;
 
 public class Condition {
 
-  private static Logger logger = Logger.getLogger(Condition.class);
+  private static final Logger logger = Logger.getLogger(Condition.class);
 
   private static JexlEngine jexl = new JexlEngine();
   private static CheckerTypeLoader checkerLoader = null;
+  private final MapContext context = new MapContext();
   private Expression expression;
   private Map<String, ConditionChecker> checkers =
-      new HashMap<String, ConditionChecker>();
-  private MapContext context = new MapContext();
+      new HashMap<>();
   private Long nextCheckTime = -1L;
 
-  public Condition(Map<String, ConditionChecker> checkers, String expr) {
+  public Condition(final Map<String, ConditionChecker> checkers, final String expr) {
     setCheckers(checkers);
     this.expression = jexl.createExpression(expr);
     updateNextCheckTime();
   }
 
-  public Condition(Map<String, ConditionChecker> checkers, String expr,
-      long nextCheckTime) {
+  public Condition(final Map<String, ConditionChecker> checkers, final String expr,
+      final long nextCheckTime) {
     this.nextCheckTime = nextCheckTime;
     setCheckers(checkers);
     this.expression = jexl.createExpression(expr);
   }
 
-  public synchronized static void setJexlEngine(JexlEngine jexl) {
+  public synchronized static void setJexlEngine(final JexlEngine jexl) {
     Condition.jexl = jexl;
   }
 
-  public synchronized static void setCheckerLoader(CheckerTypeLoader loader) {
+  protected static CheckerTypeLoader getCheckerLoader() {
+    return checkerLoader;
+  }
+
+  public synchronized static void setCheckerLoader(final CheckerTypeLoader loader) {
     Condition.checkerLoader = loader;
   }
 
-  protected static CheckerTypeLoader getCheckerLoader() {
-    return checkerLoader;
+  public static Condition fromJson(final Object obj) throws Exception {
+    if (checkerLoader == null) {
+      throw new Exception("Condition Checker loader not initialized!");
+    }
+
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    Condition cond = null;
+
+    try {
+      final Map<String, ConditionChecker> checkers =
+          new HashMap<>();
+      final List<Object> checkersJson = (List<Object>) jsonObj.get("checkers");
+      for (final Object oneCheckerJson : checkersJson) {
+        final Map<String, Object> oneChecker =
+            (HashMap<String, Object>) oneCheckerJson;
+        final String type = (String) oneChecker.get("type");
+        final ConditionChecker ck =
+            checkerLoader.createCheckerFromJson(type,
+                oneChecker.get("checkerJson"));
+        checkers.put(ck.getId(), ck);
+      }
+      final String expr = (String) jsonObj.get("expression");
+      final Long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
+
+      cond = new Condition(checkers, expr, nextCheckTime);
+
+    } catch (final Exception e) {
+      e.printStackTrace();
+      logger.error("Failed to recreate condition from json.", e);
+      throw new Exception("Failed to recreate condition from json.", e);
+    }
+
+    return cond;
   }
 
-  protected void registerChecker(ConditionChecker checker) {
-    checkers.put(checker.getId(), checker);
-    context.set(checker.getId(), checker);
+  protected void registerChecker(final ConditionChecker checker) {
+    this.checkers.put(checker.getId(), checker);
+    this.context.set(checker.getId(), checker);
     updateNextCheckTime();
   }
 
   public long getNextCheckTime() {
-    return nextCheckTime;
+    return this.nextCheckTime;
   }
 
   public Map<String, ConditionChecker> getCheckers() {
     return this.checkers;
   }
 
-  public void setCheckers(Map<String, ConditionChecker> checkers) {
+  public void setCheckers(final Map<String, ConditionChecker> checkers) {
     this.checkers = checkers;
-    for (ConditionChecker checker : checkers.values()) {
+    for (final ConditionChecker checker : checkers.values()) {
       this.context.set(checker.getId(), checker);
     }
     updateNextCheckTime();
   }
 
-  public void updateCheckTime(Long ct) {
-    if (nextCheckTime < ct) {
-      nextCheckTime = ct;
+  public void updateCheckTime(final Long ct) {
+    if (this.nextCheckTime < ct) {
+      this.nextCheckTime = ct;
     }
   }
 
   private void updateNextCheckTime() {
     long time = Long.MAX_VALUE;
-    for (ConditionChecker checker : checkers.values()) {
+    for (final ConditionChecker checker : this.checkers.values()) {
       time = Math.min(time, checker.getNextCheckTime());
     }
     this.nextCheckTime = time;
   }
 
   public void resetCheckers() {
-    for (ConditionChecker checker : checkers.values()) {
+    for (final ConditionChecker checker : this.checkers.values()) {
       checker.reset();
     }
     updateNextCheckTime();
     logger.info("Done resetting checkers. The next check time will be "
-        + new DateTime(nextCheckTime));
+        + new DateTime(this.nextCheckTime));
   }
 
   public String getExpression() {
     return this.expression.getExpression();
   }
 
-  public void setExpression(String expr) {
+  public void setExpression(final String expr) {
     this.expression = jexl.createExpression(expr);
   }
 
   public boolean isMet() {
     if (logger.isDebugEnabled()) {
-      logger.debug("Testing condition " + expression);
+      logger.debug("Testing condition " + this.expression);
     }
-    return expression.evaluate(context).equals(Boolean.TRUE);
+    return this.expression.evaluate(this.context).equals(Boolean.TRUE);
   }
 
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("expression", expression.getExpression());
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("expression", this.expression.getExpression());
 
-    List<Object> checkersJson = new ArrayList<Object>();
-    for (ConditionChecker checker : checkers.values()) {
-      Map<String, Object> oneChecker = new HashMap<String, Object>();
+    final List<Object> checkersJson = new ArrayList<>();
+    for (final ConditionChecker checker : this.checkers.values()) {
+      final Map<String, Object> oneChecker = new HashMap<>();
       oneChecker.put("type", checker.getType());
       oneChecker.put("checkerJson", checker.toJson());
       checkersJson.add(oneChecker);
     }
     jsonObj.put("checkers", checkersJson);
-    jsonObj.put("nextCheckTime", String.valueOf(nextCheckTime));
+    jsonObj.put("nextCheckTime", String.valueOf(this.nextCheckTime));
 
     return jsonObj;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Condition fromJson(Object obj) throws Exception {
-    if (checkerLoader == null) {
-      throw new Exception("Condition Checker loader not initialized!");
-    }
-
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    Condition cond = null;
-
-    try {
-      Map<String, ConditionChecker> checkers =
-          new HashMap<String, ConditionChecker>();
-      List<Object> checkersJson = (List<Object>) jsonObj.get("checkers");
-      for (Object oneCheckerJson : checkersJson) {
-        Map<String, Object> oneChecker =
-            (HashMap<String, Object>) oneCheckerJson;
-        String type = (String) oneChecker.get("type");
-        ConditionChecker ck =
-            checkerLoader.createCheckerFromJson(type,
-                oneChecker.get("checkerJson"));
-        checkers.put(ck.getId(), ck);
-      }
-      String expr = (String) jsonObj.get("expression");
-      Long nextCheckTime = Long.valueOf((String) jsonObj.get("nextCheckTime"));
-
-      cond = new Condition(checkers, expr, nextCheckTime);
-
-    } catch (Exception e) {
-      e.printStackTrace();
-      logger.error("Failed to recreate condition from json.", e);
-      throw new Exception("Failed to recreate condition from json.", e);
-    }
-
-    return cond;
-  }
-
 }
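
(The fromJson() path above only works once the static checker loader has been installed; TriggerManager's constructor, further down in this diff, is what does that in production. Below is a minimal sketch of the same wiring, assuming a Props instance and an already-serialized condition object as inputs — the class and method names come from the diffs, everything else is placeholder.)

import azkaban.trigger.CheckerTypeLoader;
import azkaban.trigger.Condition;
import azkaban.utils.Props;

public class ConditionWiringSketch {

  // Restores a Condition previously produced by Condition.toJson().
  public static Condition restore(final Props props, final Object serialized) throws Exception {
    final CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
    checkerTypeLoader.init(props);                 // load the available checker types
    Condition.setCheckerLoader(checkerTypeLoader); // required, or fromJson() throws
                                                   // "Condition Checker loader not initialized!"

    // fromJson() expects the Map shape emitted by toJson(): "expression",
    // "checkers" (a list of {"type", "checkerJson"}) and "nextCheckTime".
    return Condition.fromJson(serialized);
  }
}
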
diff --git a/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerImpl.java b/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerImpl.java
index e60e942..d723f86 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerImpl.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerImpl.java
@@ -21,53 +21,57 @@ import azkaban.db.DatabaseOperator;
 import azkaban.db.SQLTransaction;
 import azkaban.utils.GZIPUtils;
 import azkaban.utils.JSONUtils;
-
 import com.google.inject.Inject;
-
 import java.io.IOException;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.commons.dbutils.ResultSetHandler;
 import org.apache.log4j.Logger;
 import org.joda.time.DateTime;
 
 
 public class JdbcTriggerImpl implements TriggerLoader {
+
   private static final String TRIGGER_TABLE_NAME = "triggers";
   private static final String GET_UPDATED_TRIGGERS =
-      "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + TRIGGER_TABLE_NAME + " WHERE modify_time>=?";
+      "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + TRIGGER_TABLE_NAME
+          + " WHERE modify_time>=?";
   private static final String GET_ALL_TRIGGERS =
       "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + TRIGGER_TABLE_NAME;
   private static final String GET_TRIGGER =
-      "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + TRIGGER_TABLE_NAME + " WHERE trigger_id=?";
-  private static final String ADD_TRIGGER = "INSERT INTO " + TRIGGER_TABLE_NAME + " ( modify_time) values (?)";
-  private static final String REMOVE_TRIGGER = "DELETE FROM " + TRIGGER_TABLE_NAME + " WHERE trigger_id=?";
+      "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM " + TRIGGER_TABLE_NAME
+          + " WHERE trigger_id=?";
+  private static final String ADD_TRIGGER =
+      "INSERT INTO " + TRIGGER_TABLE_NAME + " ( modify_time) values (?)";
+  private static final String REMOVE_TRIGGER =
+      "DELETE FROM " + TRIGGER_TABLE_NAME + " WHERE trigger_id=?";
   private static final String UPDATE_TRIGGER =
-      "UPDATE " + TRIGGER_TABLE_NAME + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
-  private static Logger logger = Logger.getLogger(JdbcTriggerImpl.class);
+      "UPDATE " + TRIGGER_TABLE_NAME
+          + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
+  private static final Logger logger = Logger.getLogger(JdbcTriggerImpl.class);
   private final DatabaseOperator dbOperator;
-  private EncodingType defaultEncodingType = EncodingType.GZIP;
+  private final EncodingType defaultEncodingType = EncodingType.GZIP;
 
   @Inject
-  public JdbcTriggerImpl(DatabaseOperator databaseOperator) {
+  public JdbcTriggerImpl(final DatabaseOperator databaseOperator) {
     this.dbOperator = databaseOperator;
   }
 
   @Override
-  public List<Trigger> getUpdatedTriggers(long lastUpdateTime) throws TriggerLoaderException {
+  public List<Trigger> getUpdatedTriggers(final long lastUpdateTime) throws TriggerLoaderException {
     logger.info("Loading triggers changed since " + new DateTime(lastUpdateTime).toString());
 
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
 
     try {
-      List<Trigger> triggers = dbOperator.query(GET_UPDATED_TRIGGERS, handler, lastUpdateTime);
+      final List<Trigger> triggers = this.dbOperator
+          .query(GET_UPDATED_TRIGGERS, handler, lastUpdateTime);
       logger.info("Loaded " + triggers.size() + " triggers.");
       return triggers;
-    } catch (SQLException ex) {
+    } catch (final SQLException ex) {
       throw new TriggerLoaderException("Loading triggers from db failed.", ex);
     }
   }
@@ -76,28 +80,29 @@ public class JdbcTriggerImpl implements TriggerLoader {
   public List<Trigger> loadTriggers() throws TriggerLoaderException {
     logger.info("Loading all triggers from db.");
 
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
 
     try {
-      List<Trigger> triggers = dbOperator.query(GET_ALL_TRIGGERS, handler);
+      final List<Trigger> triggers = this.dbOperator.query(GET_ALL_TRIGGERS, handler);
       logger.info("Loaded " + triggers.size() + " triggers.");
       return triggers;
-    } catch (SQLException ex) {
+    } catch (final SQLException ex) {
       throw new TriggerLoaderException("Loading triggers from db failed.", ex);
     }
   }
 
   @Override
-  public void removeTrigger(Trigger t) throws TriggerLoaderException {
+  public void removeTrigger(final Trigger t) throws TriggerLoaderException {
     logger.info("Removing trigger " + t.toString() + " from db.");
 
     try {
-      int removes = dbOperator.update(REMOVE_TRIGGER, t.getTriggerId());
+      final int removes = this.dbOperator.update(REMOVE_TRIGGER, t.getTriggerId());
       if (removes == 0) {
         throw new TriggerLoaderException("No trigger has been removed.");
       }
-    } catch (SQLException ex) {
-      throw new TriggerLoaderException("Remove trigger " + t.getTriggerId() + " from db failed. ", ex);
+    } catch (final SQLException ex) {
+      throw new TriggerLoaderException("Remove trigger " + t.getTriggerId() + " from db failed. ",
+          ex);
     }
   }
 
@@ -105,76 +110,79 @@ public class JdbcTriggerImpl implements TriggerLoader {
    * TODO: Don't understand why we need synchronized here.
    */
   @Override
-  public synchronized void addTrigger(Trigger t) throws TriggerLoaderException {
+  public synchronized void addTrigger(final Trigger t) throws TriggerLoaderException {
     logger.info("Inserting trigger " + t.toString() + " into db.");
 
-    SQLTransaction<Long> insertAndGetLastID = transOperator -> {
+    final SQLTransaction<Long> insertAndGetLastID = transOperator -> {
       transOperator.update(ADD_TRIGGER, DateTime.now().getMillis());
       transOperator.getConnection().commit();
       return transOperator.getLastInsertId();
     };
 
     try {
-      long id = dbOperator.transaction(insertAndGetLastID);
+      final long id = this.dbOperator.transaction(insertAndGetLastID);
       t.setTriggerId((int) id);
       updateTrigger(t);
       logger.info("uploaded trigger " + t.getDescription());
-    } catch (SQLException ex) {
-      logger.error("Adding Trigger " + t.getTriggerId() + " failed." );
-      throw new TriggerLoaderException("trigger id is not properly created.",ex);
+    } catch (final SQLException ex) {
+      logger.error("Adding Trigger " + t.getTriggerId() + " failed.");
+      throw new TriggerLoaderException("trigger id is not properly created.", ex);
     }
   }
 
   @Override
-  public void updateTrigger(Trigger t) throws TriggerLoaderException {
+  public void updateTrigger(final Trigger t) throws TriggerLoaderException {
     logger.info("Updating trigger " + t.getTriggerId() + " into db.");
     t.setLastModifyTime(System.currentTimeMillis());
-    updateTrigger(t, defaultEncodingType);
+    updateTrigger(t, this.defaultEncodingType);
   }
 
-  private void updateTrigger(Trigger t, EncodingType encType) throws TriggerLoaderException {
+  private void updateTrigger(final Trigger t, final EncodingType encType)
+      throws TriggerLoaderException {
 
-    String json = JSONUtils.toJSON(t.toJson());
+    final String json = JSONUtils.toJSON(t.toJson());
     byte[] data = null;
     try {
-      byte[] stringData = json.getBytes("UTF-8");
+      final byte[] stringData = json.getBytes("UTF-8");
       data = stringData;
 
       if (encType == EncodingType.GZIP) {
         data = GZIPUtils.gzipBytes(stringData);
       }
-      logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:" + data.length);
-    } catch (IOException e) {
+      logger.debug(
+          "NumChars: " + json.length() + " UTF-8:" + stringData.length + " Gzip:" + data.length);
+    } catch (final IOException e) {
       logger.error("Trigger encoding fails", e);
       throw new TriggerLoaderException("Error encoding the trigger " + t.toString(), e);
     }
 
     try {
-      int updates = dbOperator.update(UPDATE_TRIGGER, t.getSource(), t.getLastModifyTime(), encType.getNumVal(), data,
-          t.getTriggerId());
+      final int updates = this.dbOperator
+          .update(UPDATE_TRIGGER, t.getSource(), t.getLastModifyTime(), encType.getNumVal(), data,
+              t.getTriggerId());
       if (updates == 0) {
         throw new TriggerLoaderException("No trigger has been updated.");
       }
-    } catch (SQLException ex) {
-      logger.error("Updating Trigger " + t.getTriggerId() + " failed." );
+    } catch (final SQLException ex) {
+      logger.error("Updating Trigger " + t.getTriggerId() + " failed.");
       throw new TriggerLoaderException("DB Trigger update failed. ", ex);
     }
   }
 
   @Override
-  public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
+  public Trigger loadTrigger(final int triggerId) throws TriggerLoaderException {
     logger.info("Loading trigger " + triggerId + " from db.");
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
 
     try {
-      List<Trigger> triggers = dbOperator.query(GET_TRIGGER, handler, triggerId);
+      final List<Trigger> triggers = this.dbOperator.query(GET_TRIGGER, handler, triggerId);
 
       if (triggers.size() == 0) {
         logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
         throw new TriggerLoaderException("Loaded 0 triggers. Failed to load trigger " + triggerId);
       }
       return triggers.get(0);
-    } catch (SQLException ex) {
+    } catch (final SQLException ex) {
       logger.error("Failed to load trigger " + triggerId);
       throw new TriggerLoaderException("Load a specific trigger failed.", ex);
     }
@@ -183,27 +191,27 @@ public class JdbcTriggerImpl implements TriggerLoader {
   public class TriggerResultHandler implements ResultSetHandler<List<Trigger>> {
 
     @Override
-    public List<Trigger> handle(ResultSet rs) throws SQLException {
+    public List<Trigger> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
         return Collections.<Trigger>emptyList();
       }
 
-      ArrayList<Trigger> triggers = new ArrayList<Trigger>();
+      final ArrayList<Trigger> triggers = new ArrayList<>();
       do {
-        int triggerId = rs.getInt(1);
-        int encodingType = rs.getInt(4);
-        byte[] data = rs.getBytes(5);
+        final int triggerId = rs.getInt(1);
+        final int encodingType = rs.getInt(4);
+        final byte[] data = rs.getBytes(5);
 
         Object jsonObj = null;
         if (data != null) {
-          EncodingType encType = EncodingType.fromInteger(encodingType);
+          final EncodingType encType = EncodingType.fromInteger(encodingType);
 
           try {
             // Convoluted way to inflate strings. Should find common package or
             // helper function.
             jsonObj = JSONUtils.parseJSONFromString(encType == EncodingType.GZIP ?
                 GZIPUtils.unGzipString(data, "UTF-8") : new String(data, "UTF-8"));
-          } catch (IOException e) {
+          } catch (final IOException e) {
             throw new SQLException("Error reconstructing trigger data ");
           }
         }
@@ -212,7 +220,7 @@ public class JdbcTriggerImpl implements TriggerLoader {
         try {
           t = Trigger.fromJson(jsonObj);
           triggers.add(t);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error("Failed to load trigger " + triggerId, e);
         }
       } while (rs.next());
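
(updateTrigger() above turns the trigger into JSON, UTF-8 encodes it, and gzips the bytes when the encoding type is GZIP; TriggerResultHandler reverses the same steps when reading the data column. The standalone sketch below mirrors that round trip using only the JDK — the real code calls azkaban's GZIPUtils.gzipBytes and GZIPUtils.unGzipString — and the payload string is a made-up stand-in.)

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

public class TriggerBlobCodecSketch {

  static byte[] gzip(final byte[] plain) throws IOException {
    final ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try (GZIPOutputStream gz = new GZIPOutputStream(bos)) {
      gz.write(plain);
    }
    return bos.toByteArray();
  }

  static String unGzip(final byte[] compressed) throws IOException {
    try (GZIPInputStream gz = new GZIPInputStream(new ByteArrayInputStream(compressed))) {
      final ByteArrayOutputStream bos = new ByteArrayOutputStream();
      final byte[] buf = new byte[4096];
      int n;
      while ((n = gz.read(buf)) != -1) {
        bos.write(buf, 0, n);
      }
      return new String(bos.toByteArray(), StandardCharsets.UTF_8);
    }
  }

  public static void main(final String[] args) throws IOException {
    final String json = "{\"triggerId\":1,\"source\":\"placeholder\"}"; // stand-in payload
    final byte[] stringData = json.getBytes(StandardCharsets.UTF_8);
    final byte[] data = gzip(stringData); // what lands in the data column when enc_type is GZIP
    System.out.println("NumChars: " + json.length()
        + " UTF-8:" + stringData.length + " Gzip:" + data.length);
    System.out.println(unGzip(data).equals(json)); // true: the round trip is lossless
  }
}
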
diff --git a/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerLoader.java b/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
index ab773f1..9ea4255 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/JdbcTriggerLoader.java
@@ -16,6 +16,10 @@
 
 package azkaban.trigger;
 
+import azkaban.database.AbstractJdbcLoader;
+import azkaban.utils.GZIPUtils;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.ResultSet;
@@ -23,71 +27,57 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.commons.dbutils.DbUtils;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
 import org.apache.log4j.Logger;
-
 import org.joda.time.DateTime;
 
-import azkaban.database.AbstractJdbcLoader;
-import azkaban.utils.GZIPUtils;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Props;
-
 public class JdbcTriggerLoader extends AbstractJdbcLoader implements
     TriggerLoader {
-  private static Logger logger = Logger.getLogger(JdbcTriggerLoader.class);
-
-  private EncodingType defaultEncodingType = EncodingType.GZIP;
 
   private static final String triggerTblName = "triggers";
-
   private static final String GET_UPDATED_TRIGGERS =
       "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
           + triggerTblName + " WHERE modify_time>=?";
-
-  private static String GET_ALL_TRIGGERS =
+  private static final Logger logger = Logger.getLogger(JdbcTriggerLoader.class);
+  private static final String GET_ALL_TRIGGERS =
       "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
           + triggerTblName;
-
-  private static String GET_TRIGGER =
+  private static final String GET_TRIGGER =
       "SELECT trigger_id, trigger_source, modify_time, enc_type, data FROM "
           + triggerTblName + " WHERE trigger_id=?";
-
-  private static String ADD_TRIGGER = "INSERT INTO " + triggerTblName
+  private static final String ADD_TRIGGER = "INSERT INTO " + triggerTblName
       + " ( modify_time) values (?)";
-
-  private static String REMOVE_TRIGGER = "DELETE FROM " + triggerTblName
+  private static final String REMOVE_TRIGGER = "DELETE FROM " + triggerTblName
       + " WHERE trigger_id=?";
-
-  private static String UPDATE_TRIGGER =
+  private static final String UPDATE_TRIGGER =
       "UPDATE "
           + triggerTblName
           + " SET trigger_source=?, modify_time=?, enc_type=?, data=? WHERE trigger_id=?";
+  private EncodingType defaultEncodingType = EncodingType.GZIP;
 
-  public EncodingType getDefaultEncodingType() {
-    return defaultEncodingType;
+  public JdbcTriggerLoader(final Props props) {
+    super(props);
   }
 
-  public void setDefaultEncodingType(EncodingType defaultEncodingType) {
-    this.defaultEncodingType = defaultEncodingType;
+  public EncodingType getDefaultEncodingType() {
+    return this.defaultEncodingType;
   }
 
-  public JdbcTriggerLoader(Props props) {
-    super(props);
+  public void setDefaultEncodingType(final EncodingType defaultEncodingType) {
+    this.defaultEncodingType = defaultEncodingType;
   }
 
   @Override
-  public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+  public List<Trigger> getUpdatedTriggers(final long lastUpdateTime)
       throws TriggerLoaderException {
     logger.info("Loading triggers changed since "
         + new DateTime(lastUpdateTime).toString());
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
-    QueryRunner runner = new QueryRunner();
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+    final QueryRunner runner = new QueryRunner();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
 
     List<Trigger> triggers;
 
@@ -95,7 +85,7 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
       triggers =
           runner.query(connection, GET_UPDATED_TRIGGERS, handler,
               lastUpdateTime);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(GET_ALL_TRIGGERS + " failed.");
 
       throw new TriggerLoaderException("Loading triggers from db failed. ", e);
@@ -111,16 +101,16 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
   @Override
   public List<Trigger> loadTriggers() throws TriggerLoaderException {
     logger.info("Loading all triggers from db.");
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
 
-    QueryRunner runner = new QueryRunner();
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+    final QueryRunner runner = new QueryRunner();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
 
     List<Trigger> triggers;
 
     try {
       triggers = runner.query(connection, GET_ALL_TRIGGERS, handler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(GET_ALL_TRIGGERS + " failed.");
 
       throw new TriggerLoaderException("Loading triggers from db failed. ", e);
@@ -134,16 +124,16 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void removeTrigger(Trigger t) throws TriggerLoaderException {
+  public void removeTrigger(final Trigger t) throws TriggerLoaderException {
     logger.info("Removing trigger " + t.toString() + " from db.");
 
-    QueryRunner runner = createQueryRunner();
+    final QueryRunner runner = createQueryRunner();
     try {
-      int removes = runner.update(REMOVE_TRIGGER, t.getTriggerId());
+      final int removes = runner.update(REMOVE_TRIGGER, t.getTriggerId());
       if (removes == 0) {
         throw new TriggerLoaderException("No trigger has been removed.");
       }
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(REMOVE_TRIGGER + " failed.");
       throw new TriggerLoaderException("Remove trigger " + t.toString()
           + " from db failed. ", e);
@@ -151,25 +141,25 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
   }
 
   @Override
-  public void addTrigger(Trigger t) throws TriggerLoaderException {
+  public void addTrigger(final Trigger t) throws TriggerLoaderException {
     logger.info("Inserting trigger " + t.toString() + " into db.");
     t.setLastModifyTime(System.currentTimeMillis());
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     try {
-      addTrigger(connection, t, defaultEncodingType);
-    } catch (Exception e) {
+      addTrigger(connection, t, this.defaultEncodingType);
+    } catch (final Exception e) {
       throw new TriggerLoaderException("Error uploading trigger", e);
     } finally {
       DbUtils.closeQuietly(connection);
     }
   }
 
-  private synchronized void addTrigger(Connection connection, Trigger t,
-      EncodingType encType) throws TriggerLoaderException {
+  private synchronized void addTrigger(final Connection connection, final Trigger t,
+      final EncodingType encType) throws TriggerLoaderException {
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
 
-    long id;
+    final long id;
 
     try {
       runner.update(connection, ADD_TRIGGER, DateTime.now().getMillis());
@@ -186,22 +176,22 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
       t.setTriggerId((int) id);
       updateTrigger(t);
       logger.info("uploaded trigger " + t.getDescription());
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       throw new TriggerLoaderException("Error creating trigger.", e);
     }
 
   }
 
   @Override
-  public void updateTrigger(Trigger t) throws TriggerLoaderException {
+  public void updateTrigger(final Trigger t) throws TriggerLoaderException {
     if (logger.isDebugEnabled()) {
       logger.debug("Updating trigger " + t.getTriggerId() + " into db.");
     }
     t.setLastModifyTime(System.currentTimeMillis());
-    Connection connection = getConnection();
+    final Connection connection = getConnection();
     try {
-      updateTrigger(connection, t, defaultEncodingType);
-    } catch (Exception e) {
+      updateTrigger(connection, t, this.defaultEncodingType);
+    } catch (final Exception e) {
       e.printStackTrace();
       throw new TriggerLoaderException("Failed to update trigger "
           + t.toString() + " into db!");
@@ -210,13 +200,13 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
     }
   }
 
-  private void updateTrigger(Connection connection, Trigger t,
-      EncodingType encType) throws TriggerLoaderException {
+  private void updateTrigger(final Connection connection, final Trigger t,
+      final EncodingType encType) throws TriggerLoaderException {
 
-    String json = JSONUtils.toJSON(t.toJson());
+    final String json = JSONUtils.toJSON(t.toJson());
     byte[] data = null;
     try {
-      byte[] stringData = json.getBytes("UTF-8");
+      final byte[] stringData = json.getBytes("UTF-8");
       data = stringData;
 
       if (encType == EncodingType.GZIP) {
@@ -224,15 +214,15 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
       }
       logger.debug("NumChars: " + json.length() + " UTF-8:" + stringData.length
           + " Gzip:" + data.length);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new TriggerLoaderException("Error encoding the trigger "
           + t.toString());
     }
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
 
     try {
-      int updates =
+      final int updates =
           runner.update(connection, UPDATE_TRIGGER, t.getSource(),
               t.getLastModifyTime(), encType.getNumVal(), data,
               t.getTriggerId());
@@ -244,23 +234,64 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
           logger.debug("Updated " + updates + " records.");
         }
       }
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       logger.error(UPDATE_TRIGGER + " failed.");
       throw new TriggerLoaderException("Update trigger " + t.toString()
           + " into db failed. ", e);
     }
   }
 
+  private Connection getConnection() throws TriggerLoaderException {
+    Connection connection = null;
+    try {
+      connection = super.getDBConnection(false);
+    } catch (final Exception e) {
+      DbUtils.closeQuietly(connection);
+      throw new TriggerLoaderException("Error getting DB connection.", e);
+    }
+
+    return connection;
+  }
+
+  @Override
+  public Trigger loadTrigger(final int triggerId) throws TriggerLoaderException {
+    logger.info("Loading trigger " + triggerId + " from db.");
+    final Connection connection = getConnection();
+
+    final QueryRunner runner = new QueryRunner();
+    final ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
+
+    List<Trigger> triggers;
+
+    try {
+      triggers = runner.query(connection, GET_TRIGGER, handler, triggerId);
+    } catch (final SQLException e) {
+      logger.error(GET_TRIGGER + " failed.");
+      throw new TriggerLoaderException("Loading trigger from db failed. ", e);
+    } finally {
+      DbUtils.closeQuietly(connection);
+    }
+
+    if (triggers.size() == 0) {
+      logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
+      throw new TriggerLoaderException(
+          "Loaded 0 triggers. Failed to load trigger " + triggerId);
+    }
+
+    return triggers.get(0);
+  }
+
   private static class LastInsertID implements ResultSetHandler<Long> {
-    private static String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
+
+    private static final String LAST_INSERT_ID = "SELECT LAST_INSERT_ID()";
 
     @Override
-    public Long handle(ResultSet rs) throws SQLException {
+    public Long handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
         return -1L;
       }
 
-      long id = rs.getLong(1);
+      final long id = rs.getLong(1);
       return id;
     }
 
@@ -269,33 +300,33 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
   public static class TriggerResultHandler implements ResultSetHandler<List<Trigger>> {
 
     @Override
-    public List<Trigger> handle(ResultSet rs) throws SQLException {
+    public List<Trigger> handle(final ResultSet rs) throws SQLException {
       if (!rs.next()) {
-        return Collections.<Trigger> emptyList();
+        return Collections.<Trigger>emptyList();
       }
 
-      ArrayList<Trigger> triggers = new ArrayList<Trigger>();
+      final ArrayList<Trigger> triggers = new ArrayList<>();
       do {
-        int triggerId = rs.getInt(1);
-        int encodingType = rs.getInt(4);
-        byte[] data = rs.getBytes(5);
+        final int triggerId = rs.getInt(1);
+        final int encodingType = rs.getInt(4);
+        final byte[] data = rs.getBytes(5);
 
         Object jsonObj = null;
         if (data != null) {
-          EncodingType encType = EncodingType.fromInteger(encodingType);
+          final EncodingType encType = EncodingType.fromInteger(encodingType);
 
           try {
             // Convoluted way to inflate strings. Should find common package or
             // helper function.
             if (encType == EncodingType.GZIP) {
               // Decompress the sucker.
-              String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
+              final String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
               jsonObj = JSONUtils.parseJSONFromString(jsonString);
             } else {
-              String jsonString = new String(data, "UTF-8");
+              final String jsonString = new String(data, "UTF-8");
               jsonObj = JSONUtils.parseJSONFromString(jsonString);
             }
-          } catch (IOException e) {
+          } catch (final IOException e) {
             throw new SQLException("Error reconstructing trigger data ");
           }
         }
@@ -304,7 +335,7 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
         try {
           t = Trigger.fromJson(jsonObj);
           triggers.add(t);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           e.printStackTrace();
           logger.error("Failed to load trigger " + triggerId);
         }
@@ -315,44 +346,4 @@ public class JdbcTriggerLoader extends AbstractJdbcLoader implements
 
   }
 
-  private Connection getConnection() throws TriggerLoaderException {
-    Connection connection = null;
-    try {
-      connection = super.getDBConnection(false);
-    } catch (Exception e) {
-      DbUtils.closeQuietly(connection);
-      throw new TriggerLoaderException("Error getting DB connection.", e);
-    }
-
-    return connection;
-  }
-
-  @Override
-  public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
-    logger.info("Loading trigger " + triggerId + " from db.");
-    Connection connection = getConnection();
-
-    QueryRunner runner = new QueryRunner();
-    ResultSetHandler<List<Trigger>> handler = new TriggerResultHandler();
-
-    List<Trigger> triggers;
-
-    try {
-      triggers = runner.query(connection, GET_TRIGGER, handler, triggerId);
-    } catch (SQLException e) {
-      logger.error(GET_TRIGGER + " failed.");
-      throw new TriggerLoaderException("Loading trigger from db failed. ", e);
-    } finally {
-      DbUtils.closeQuietly(connection);
-    }
-
-    if (triggers.size() == 0) {
-      logger.error("Loaded 0 triggers. Failed to load trigger " + triggerId);
-      throw new TriggerLoaderException(
-          "Loaded 0 triggers. Failed to load trigger " + triggerId);
-    }
-
-    return triggers.get(0);
-  }
-
 }
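
(Both trigger loaders in this diff insert in two steps: first a minimal row so the database assigns the auto-increment trigger_id — read back with SELECT LAST_INSERT_ID() here, with getLastInsertId() in JdbcTriggerImpl — and then an UPDATE that writes the serialized payload against that id. The plain-JDBC sketch below illustrates the same pattern with placeholder values; it is not the Azkaban code.)

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class TwoStepInsertSketch {

  static long addTriggerRow(final Connection conn, final long modifyTime,
      final byte[] data) throws SQLException {
    final long triggerId;
    // Step 1: insert a minimal row and let the database assign trigger_id.
    try (PreparedStatement insert = conn.prepareStatement(
        "INSERT INTO triggers (modify_time) VALUES (?)",
        Statement.RETURN_GENERATED_KEYS)) {
      insert.setLong(1, modifyTime);
      insert.executeUpdate();
      try (ResultSet keys = insert.getGeneratedKeys()) {
        if (!keys.next()) {
          throw new SQLException("trigger id is not properly created.");
        }
        triggerId = keys.getLong(1); // equivalent of SELECT LAST_INSERT_ID()
      }
    }
    // Step 2: now that the id exists, write the real payload against it.
    try (PreparedStatement update = conn.prepareStatement(
        "UPDATE triggers SET trigger_source=?, modify_time=?, enc_type=?, data=? "
            + "WHERE trigger_id=?")) {
      update.setString(1, "placeholderSource"); // placeholder trigger_source
      update.setLong(2, modifyTime);
      update.setInt(3, 2);                      // placeholder enc_type value
      update.setBytes(4, data);
      update.setLong(5, triggerId);
      update.executeUpdate();
    }
    return triggerId;
  }
}
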
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerAgent.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerAgent.java
index b779e60..3d3b53c 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerAgent.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerAgent.java
@@ -19,6 +19,7 @@ package azkaban.trigger;
 import azkaban.utils.Props;
 
 public interface TriggerAgent {
+
   public void loadTriggerFromProps(Props props) throws Exception;
 
   public String getTriggerSource();
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerException.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerException.java
index 8b01478..0767516 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerException.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerException.java
@@ -17,17 +17,18 @@
 package azkaban.trigger;
 
 public class TriggerException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public TriggerException(String message) {
+  public TriggerException(final String message) {
     super(message);
   }
 
-  public TriggerException(String message, Throwable cause) {
+  public TriggerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public TriggerException(Throwable e) {
+  public TriggerException(final Throwable e) {
     super(e);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerLoaderException.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerLoaderException.java
index aa40fb9..50e598d 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerLoaderException.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerLoaderException.java
@@ -17,17 +17,18 @@
 package azkaban.trigger;
 
 public class TriggerLoaderException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public TriggerLoaderException(String message) {
+  public TriggerLoaderException(final String message) {
     super(message);
   }
 
-  public TriggerLoaderException(String message, Throwable cause) {
+  public TriggerLoaderException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public TriggerLoaderException(Throwable e) {
+  public TriggerLoaderException(final Throwable e) {
     super(e);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerManager.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerManager.java
index 56637f8..2051117 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerManager.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerManager.java
@@ -17,7 +17,13 @@
 package azkaban.trigger;
 
 import azkaban.ServiceProvider;
-import com.google.inject.Inject;
+import azkaban.event.Event;
+import azkaban.event.Event.Type;
+import azkaban.event.EventHandler;
+import azkaban.event.EventListener;
+import azkaban.executor.ExecutableFlow;
+import azkaban.executor.ExecutorManager;
+import azkaban.utils.Props;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -28,67 +34,53 @@ import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.PriorityBlockingQueue;
-
 import org.apache.log4j.Logger;
 
-import azkaban.event.Event;
-import azkaban.event.EventHandler;
-import azkaban.event.EventListener;
-import azkaban.event.Event.Type;
-import azkaban.executor.ExecutableFlow;
-import azkaban.executor.ExecutorManager;
-import azkaban.utils.Props;
-
 public class TriggerManager extends EventHandler implements
     TriggerManagerAdapter {
-  private static Logger logger = Logger.getLogger(TriggerManager.class);
-  public static final long DEFAULT_SCANNER_INTERVAL_MS = 60000;
-
-  private static Map<Integer, Trigger> triggerIdMap =
-      new ConcurrentHashMap<Integer, Trigger>();
-
-  private CheckerTypeLoader checkerTypeLoader;
-  private ActionTypeLoader actionTypeLoader;
-  private TriggerLoader triggerLoader;
 
+  public static final long DEFAULT_SCANNER_INTERVAL_MS = 60000;
+  private static final Logger logger = Logger.getLogger(TriggerManager.class);
+  private static final Map<Integer, Trigger> triggerIdMap =
+      new ConcurrentHashMap<>();
   private final TriggerScannerThread runnerThread;
+  private final Object syncObj = new Object();
+  private final CheckerTypeLoader checkerTypeLoader;
+  private final ActionTypeLoader actionTypeLoader;
+  private final TriggerLoader triggerLoader;
+  private final LocalTriggerJMX jmxStats = new LocalTriggerJMX();
+  private final ExecutorManagerEventListener listener =
+      new ExecutorManagerEventListener();
   private long lastRunnerThreadCheckTime = -1;
   private long runnerThreadIdleTime = -1;
-  private LocalTriggerJMX jmxStats = new LocalTriggerJMX();
-
-  private ExecutorManagerEventListener listener =
-      new ExecutorManagerEventListener();
-
-  private final Object syncObj = new Object();
-
   private String scannerStage = "";
 
   // TODO kunkun-tang: Before apply guice to this class, we should make
   // ExecutorManager guiceable.
-  public TriggerManager(Props props, TriggerLoader triggerLoader,
-      ExecutorManager executorManager) throws TriggerManagerException {
+  public TriggerManager(final Props props, final TriggerLoader triggerLoader,
+      final ExecutorManager executorManager) throws TriggerManagerException {
 
     // TODO kunkun-tang: Doing hack here to allow calling new azkaban-db code. Should fix in future.
     this.triggerLoader = ServiceProvider.SERVICE_PROVIDER.getInstance(TriggerLoader.class);
 
-    long scannerInterval =
+    final long scannerInterval =
         props.getLong("trigger.scan.interval", DEFAULT_SCANNER_INTERVAL_MS);
-    runnerThread = new TriggerScannerThread(scannerInterval);
+    this.runnerThread = new TriggerScannerThread(scannerInterval);
 
-    checkerTypeLoader = new CheckerTypeLoader();
-    actionTypeLoader = new ActionTypeLoader();
+    this.checkerTypeLoader = new CheckerTypeLoader();
+    this.actionTypeLoader = new ActionTypeLoader();
 
     try {
-      checkerTypeLoader.init(props);
-      actionTypeLoader.init(props);
-    } catch (Exception e) {
+      this.checkerTypeLoader.init(props);
+      this.actionTypeLoader.init(props);
+    } catch (final Exception e) {
       throw new TriggerManagerException(e);
     }
 
-    Condition.setCheckerLoader(checkerTypeLoader);
-    Trigger.setActionTypeLoader(actionTypeLoader);
+    Condition.setCheckerLoader(this.checkerTypeLoader);
+    Trigger.setActionTypeLoader(this.actionTypeLoader);
 
-    executorManager.addListener(listener);
+    executorManager.addListener(this.listener);
 
     logger.info("TriggerManager loaded.");
   }
@@ -98,152 +90,239 @@ public class TriggerManager extends EventHandler implements
 
     try {
       // expect loader to return valid triggers
-      List<Trigger> triggers = triggerLoader.loadTriggers();
-      for (Trigger t : triggers) {
-        runnerThread.addTrigger(t);
+      final List<Trigger> triggers = this.triggerLoader.loadTriggers();
+      for (final Trigger t : triggers) {
+        this.runnerThread.addTrigger(t);
         triggerIdMap.put(t.getTriggerId(), t);
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       throw new TriggerManagerException(e);
     }
 
-    runnerThread.start();
+    this.runnerThread.start();
   }
 
   protected CheckerTypeLoader getCheckerLoader() {
-    return checkerTypeLoader;
+    return this.checkerTypeLoader;
   }
 
   protected ActionTypeLoader getActionLoader() {
-    return actionTypeLoader;
+    return this.actionTypeLoader;
   }
 
-  public void insertTrigger(Trigger t) throws TriggerManagerException {
+  public void insertTrigger(final Trigger t) throws TriggerManagerException {
     logger.info("Inserting trigger " + t + " in TriggerManager");
-    synchronized (syncObj) {
+    synchronized (this.syncObj) {
       try {
-        triggerLoader.addTrigger(t);
-      } catch (TriggerLoaderException e) {
+        this.triggerLoader.addTrigger(t);
+      } catch (final TriggerLoaderException e) {
         throw new TriggerManagerException(e);
       }
-      runnerThread.addTrigger(t);
+      this.runnerThread.addTrigger(t);
       triggerIdMap.put(t.getTriggerId(), t);
     }
   }
 
-  public void removeTrigger(int id) throws TriggerManagerException {
+  public void removeTrigger(final int id) throws TriggerManagerException {
     logger.info("Removing trigger with id: " + id + " from TriggerManager");
-    synchronized (syncObj) {
-      Trigger t = triggerIdMap.get(id);
+    synchronized (this.syncObj) {
+      final Trigger t = triggerIdMap.get(id);
       if (t != null) {
         removeTrigger(triggerIdMap.get(id));
       }
     }
   }
 
-  public void updateTrigger(Trigger t) throws TriggerManagerException {
+  public void updateTrigger(final Trigger t) throws TriggerManagerException {
     logger.info("Updating trigger " + t + " in TriggerManager");
-    synchronized (syncObj) {
-      runnerThread.deleteTrigger(triggerIdMap.get(t.getTriggerId()));
-      runnerThread.addTrigger(t);
+    synchronized (this.syncObj) {
+      this.runnerThread.deleteTrigger(triggerIdMap.get(t.getTriggerId()));
+      this.runnerThread.addTrigger(t);
       triggerIdMap.put(t.getTriggerId(), t);
     }
   }
 
-  public void removeTrigger(Trigger t) throws TriggerManagerException {
+  public void removeTrigger(final Trigger t) throws TriggerManagerException {
     logger.info("Removing trigger " + t + " from TriggerManager");
-    synchronized (syncObj) {
-      runnerThread.deleteTrigger(t);
+    synchronized (this.syncObj) {
+      this.runnerThread.deleteTrigger(t);
       triggerIdMap.remove(t.getTriggerId());
       try {
         t.stopCheckers();
-        triggerLoader.removeTrigger(t);
-      } catch (TriggerLoaderException e) {
+        this.triggerLoader.removeTrigger(t);
+      } catch (final TriggerLoaderException e) {
         throw new TriggerManagerException(e);
       }
     }
   }
 
   public List<Trigger> getTriggers() {
-    return new ArrayList<Trigger>(triggerIdMap.values());
+    return new ArrayList<>(triggerIdMap.values());
   }
 
   public Map<String, Class<? extends ConditionChecker>> getSupportedCheckers() {
-    return checkerTypeLoader.getSupportedCheckers();
+    return this.checkerTypeLoader.getSupportedCheckers();
+  }
+
+  public Trigger getTrigger(final int triggerId) {
+    synchronized (this.syncObj) {
+      return triggerIdMap.get(triggerId);
+    }
+  }
+
+  public void expireTrigger(final int triggerId) {
+    final Trigger t = getTrigger(triggerId);
+    t.setStatus(TriggerStatus.EXPIRED);
+  }
+
+  @Override
+  public List<Trigger> getTriggers(final String triggerSource) {
+    final List<Trigger> triggers = new ArrayList<>();
+    for (final Trigger t : triggerIdMap.values()) {
+      if (t.getSource().equals(triggerSource)) {
+        triggers.add(t);
+      }
+    }
+    return triggers;
+  }
+
+  @Override
+  public List<Trigger> getTriggerUpdates(final String triggerSource,
+      final long lastUpdateTime) throws TriggerManagerException {
+    final List<Trigger> triggers = new ArrayList<>();
+    for (final Trigger t : triggerIdMap.values()) {
+      if (t.getSource().equals(triggerSource)
+          && t.getLastModifyTime() > lastUpdateTime) {
+        triggers.add(t);
+      }
+    }
+    return triggers;
+  }
+
+  @Override
+  public List<Trigger> getAllTriggerUpdates(final long lastUpdateTime)
+      throws TriggerManagerException {
+    final List<Trigger> triggers = new ArrayList<>();
+    for (final Trigger t : triggerIdMap.values()) {
+      if (t.getLastModifyTime() > lastUpdateTime) {
+        triggers.add(t);
+      }
+    }
+    return triggers;
+  }
+
+  @Override
+  public void insertTrigger(final Trigger t, final String user)
+      throws TriggerManagerException {
+    insertTrigger(t);
+  }
+
+  @Override
+  public void removeTrigger(final int id, final String user) throws TriggerManagerException {
+    removeTrigger(id);
+  }
+
+  @Override
+  public void updateTrigger(final Trigger t, final String user)
+      throws TriggerManagerException {
+    updateTrigger(t);
+  }
+
+  @Override
+  public void shutdown() {
+    this.runnerThread.shutdown();
+  }
+
+  @Override
+  public TriggerJMX getJMX() {
+    return this.jmxStats;
+  }
+
+  @Override
+  public void registerCheckerType(final String name,
+      final Class<? extends ConditionChecker> checker) {
+    this.checkerTypeLoader.registerCheckerType(name, checker);
+  }
+
+  @Override
+  public void registerActionType(final String name,
+      final Class<? extends TriggerAction> action) {
+    this.actionTypeLoader.registerActionType(name, action);
   }
 
   private class TriggerScannerThread extends Thread {
-    private BlockingQueue<Trigger> triggers;
-    private Map<Integer, ExecutableFlow> justFinishedFlows;
-    private boolean shutdown = false;
+
     private final long scannerInterval;
+    private final BlockingQueue<Trigger> triggers;
+    private final Map<Integer, ExecutableFlow> justFinishedFlows;
+    private boolean shutdown = false;
 
-    public TriggerScannerThread(long scannerInterval) {
-      triggers = new PriorityBlockingQueue<Trigger>(1, new TriggerComparator());
-      justFinishedFlows = new ConcurrentHashMap<Integer, ExecutableFlow>();
+    public TriggerScannerThread(final long scannerInterval) {
+      this.triggers = new PriorityBlockingQueue<>(1, new TriggerComparator());
+      this.justFinishedFlows = new ConcurrentHashMap<>();
       this.setName("TriggerRunnerManager-Trigger-Scanner-Thread");
       this.scannerInterval = scannerInterval;
     }
 
     public void shutdown() {
       logger.error("Shutting down trigger manager thread " + this.getName());
-      shutdown = true;
+      this.shutdown = true;
       this.interrupt();
     }
 
-    public void addJustFinishedFlow(ExecutableFlow flow) {
-      synchronized (syncObj) {
-        justFinishedFlows.put(flow.getExecutionId(), flow);
+    public void addJustFinishedFlow(final ExecutableFlow flow) {
+      synchronized (TriggerManager.this.syncObj) {
+        this.justFinishedFlows.put(flow.getExecutionId(), flow);
       }
     }
 
-    public void addTrigger(Trigger t) {
-      synchronized (syncObj) {
+    public void addTrigger(final Trigger t) {
+      synchronized (TriggerManager.this.syncObj) {
         t.updateNextCheckTime();
-        triggers.add(t);
+        this.triggers.add(t);
       }
     }
 
-    public void deleteTrigger(Trigger t) {
-      triggers.remove(t);
+    public void deleteTrigger(final Trigger t) {
+      this.triggers.remove(t);
     }
 
     @Override
     public void run() {
-      while (!shutdown) {
-        synchronized (syncObj) {
+      while (!this.shutdown) {
+        synchronized (TriggerManager.this.syncObj) {
           try {
-            lastRunnerThreadCheckTime = System.currentTimeMillis();
+            TriggerManager.this.lastRunnerThreadCheckTime = System.currentTimeMillis();
 
-            scannerStage =
+            TriggerManager.this.scannerStage =
                 "Ready to start a new scan cycle at "
-                    + lastRunnerThreadCheckTime;
+                    + TriggerManager.this.lastRunnerThreadCheckTime;
 
             try {
               checkAllTriggers();
-              justFinishedFlows.clear();
-            } catch (Exception e) {
+              this.justFinishedFlows.clear();
+            } catch (final Exception e) {
               e.printStackTrace();
               logger.error(e.getMessage());
-            } catch (Throwable t) {
+            } catch (final Throwable t) {
               t.printStackTrace();
               logger.error(t.getMessage());
             }
 
-            scannerStage = "Done flipping all triggers.";
+            TriggerManager.this.scannerStage = "Done flipping all triggers.";
 
-            runnerThreadIdleTime =
-                scannerInterval
-                    - (System.currentTimeMillis() - lastRunnerThreadCheckTime);
+            TriggerManager.this.runnerThreadIdleTime =
+                this.scannerInterval
+                    - (System.currentTimeMillis() - TriggerManager.this.lastRunnerThreadCheckTime);
 
-            if (runnerThreadIdleTime < 0) {
+            if (TriggerManager.this.runnerThreadIdleTime < 0) {
               logger.error("Trigger manager thread " + this.getName()
                   + " is too busy!");
             } else {
-              syncObj.wait(runnerThreadIdleTime);
+              TriggerManager.this.syncObj.wait(TriggerManager.this.runnerThreadIdleTime);
             }
-          } catch (InterruptedException e) {
+          } catch (final InterruptedException e) {
             logger.info("Interrupted. Probably to shut down.");
           }
         }
@@ -252,9 +331,9 @@ public class TriggerManager extends EventHandler implements
 
     private void checkAllTriggers() throws TriggerManagerException {
       // sweep through the rest of them
-      for (Trigger t : triggers) {
+      for (final Trigger t : this.triggers) {
         try {
-          scannerStage = "Checking for trigger " + t.getTriggerId();
+          TriggerManager.this.scannerStage = "Checking for trigger " + t.getTriggerId();
 
           if (t.getStatus().equals(TriggerStatus.READY)) {
             if (t.triggerConditionMet()) {
@@ -268,22 +347,22 @@ public class TriggerManager extends EventHandler implements
           } else {
             t.updateNextCheckTime();
           }
-        } catch (Throwable th) {
+        } catch (final Throwable th) {
           //skip this trigger, moving on to the next one
           logger.error("Failed to process trigger with id : " + t, th);
         }
       }
     }
 
-    private void onTriggerTrigger(Trigger t) throws TriggerManagerException {
-      List<TriggerAction> actions = t.getTriggerActions();
-      for (TriggerAction action : actions) {
+    private void onTriggerTrigger(final Trigger t) throws TriggerManagerException {
+      final List<TriggerAction> actions = t.getTriggerActions();
+      for (final TriggerAction action : actions) {
         try {
           logger.info("Doing trigger actions " + action.getDescription() + " for " + t);
           action.doAction();
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error("Failed to do action " + action.getDescription() + " for " + t, e);
-        } catch (Throwable th) {
+        } catch (final Throwable th) {
           logger.error("Failed to do action " + action.getDescription() + " for " + t, th);
         }
       }
@@ -294,21 +373,21 @@ public class TriggerManager extends EventHandler implements
         t.setStatus(TriggerStatus.EXPIRED);
       }
       try {
-        triggerLoader.updateTrigger(t);
-      } catch (TriggerLoaderException e) {
+        TriggerManager.this.triggerLoader.updateTrigger(t);
+      } catch (final TriggerLoaderException e) {
         throw new TriggerManagerException(e);
       }
     }
 
-    private void onTriggerExpire(Trigger t) throws TriggerManagerException {
-      List<TriggerAction> expireActions = t.getExpireActions();
-      for (TriggerAction action : expireActions) {
+    private void onTriggerExpire(final Trigger t) throws TriggerManagerException {
+      final List<TriggerAction> expireActions = t.getExpireActions();
+      for (final TriggerAction action : expireActions) {
         try {
-          logger.info("Doing expire actions for "+ action.getDescription() + " for " + t);
+          logger.info("Doing expire actions for " + action.getDescription() + " for " + t);
           action.doAction();
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error("Failed to do expire action " + action.getDescription() + " for " + t, e);
-        } catch (Throwable th) {
+        } catch (final Throwable th) {
           logger.error("Failed to do expire action " + action.getDescription() + " for " + t, th);
         }
       }
@@ -319,17 +398,18 @@ public class TriggerManager extends EventHandler implements
         t.setStatus(TriggerStatus.EXPIRED);
       }
       try {
-        triggerLoader.updateTrigger(t);
-      } catch (TriggerLoaderException e) {
+        TriggerManager.this.triggerLoader.updateTrigger(t);
+      } catch (final TriggerLoaderException e) {
         throw new TriggerManagerException(e);
       }
     }
 
     private class TriggerComparator implements Comparator<Trigger> {
+
       @Override
-      public int compare(Trigger arg0, Trigger arg1) {
-        long first = arg1.getNextCheckTime();
-        long second = arg0.getNextCheckTime();
+      public int compare(final Trigger arg0, final Trigger arg1) {
+        final long first = arg1.getNextCheckTime();
+        final long second = arg0.getNextCheckTime();
 
         if (first == second) {
           return 0;
@@ -341,90 +421,16 @@ public class TriggerManager extends EventHandler implements
     }
   }
 
-  public Trigger getTrigger(int triggerId) {
-    synchronized (syncObj) {
-      return triggerIdMap.get(triggerId);
-    }
-  }
-
-  public void expireTrigger(int triggerId) {
-    Trigger t = getTrigger(triggerId);
-    t.setStatus(TriggerStatus.EXPIRED);
-  }
-
-  @Override
-  public List<Trigger> getTriggers(String triggerSource) {
-    List<Trigger> triggers = new ArrayList<Trigger>();
-    for (Trigger t : triggerIdMap.values()) {
-      if (t.getSource().equals(triggerSource)) {
-        triggers.add(t);
-      }
-    }
-    return triggers;
-  }
-
-  @Override
-  public List<Trigger> getTriggerUpdates(String triggerSource,
-      long lastUpdateTime) throws TriggerManagerException {
-    List<Trigger> triggers = new ArrayList<Trigger>();
-    for (Trigger t : triggerIdMap.values()) {
-      if (t.getSource().equals(triggerSource)
-          && t.getLastModifyTime() > lastUpdateTime) {
-        triggers.add(t);
-      }
-    }
-    return triggers;
-  }
-
-  @Override
-  public List<Trigger> getAllTriggerUpdates(long lastUpdateTime)
-      throws TriggerManagerException {
-    List<Trigger> triggers = new ArrayList<Trigger>();
-    for (Trigger t : triggerIdMap.values()) {
-      if (t.getLastModifyTime() > lastUpdateTime) {
-        triggers.add(t);
-      }
-    }
-    return triggers;
-  }
-
-  @Override
-  public void insertTrigger(Trigger t, String user)
-      throws TriggerManagerException {
-    insertTrigger(t);
-  }
-
-  @Override
-  public void removeTrigger(int id, String user) throws TriggerManagerException {
-    removeTrigger(id);
-  }
-
-  @Override
-  public void updateTrigger(Trigger t, String user)
-      throws TriggerManagerException {
-    updateTrigger(t);
-  }
-
-  @Override
-  public void shutdown() {
-    runnerThread.shutdown();
-  }
-
-  @Override
-  public TriggerJMX getJMX() {
-    return this.jmxStats;
-  }
-
   private class LocalTriggerJMX implements TriggerJMX {
 
     @Override
     public long getLastRunnerThreadCheckTime() {
-      return lastRunnerThreadCheckTime;
+      return TriggerManager.this.lastRunnerThreadCheckTime;
     }
 
     @Override
     public boolean isRunnerThreadActive() {
-      return runnerThread.isAlive();
+      return TriggerManager.this.runnerThread.isAlive();
     }
 
     @Override
@@ -439,8 +445,8 @@ public class TriggerManager extends EventHandler implements
 
     @Override
     public String getTriggerSources() {
-      Set<String> sources = new HashSet<String>();
-      for (Trigger t : triggerIdMap.values()) {
+      final Set<String> sources = new HashSet<>();
+      for (final Trigger t : triggerIdMap.values()) {
         sources.add(t.getSource());
       }
       return sources.toString();
@@ -453,45 +459,34 @@ public class TriggerManager extends EventHandler implements
 
     @Override
     public long getScannerIdleTime() {
-      return runnerThreadIdleTime;
+      return TriggerManager.this.runnerThreadIdleTime;
     }
 
     @Override
     public Map<String, Object> getAllJMXMbeans() {
-      return new HashMap<String, Object>();
+      return new HashMap<>();
     }
 
     @Override
     public String getScannerThreadStage() {
-      return scannerStage;
+      return TriggerManager.this.scannerStage;
     }
 
   }
 
-  @Override
-  public void registerCheckerType(String name,
-      Class<? extends ConditionChecker> checker) {
-    checkerTypeLoader.registerCheckerType(name, checker);
-  }
-
-  @Override
-  public void registerActionType(String name,
-      Class<? extends TriggerAction> action) {
-    actionTypeLoader.registerActionType(name, action);
-  }
-
   private class ExecutorManagerEventListener implements EventListener {
+
     public ExecutorManagerEventListener() {
     }
 
     @Override
-    public void handleEvent(Event event) {
+    public void handleEvent(final Event event) {
       // this needs to be fixed for perf
-      synchronized (syncObj) {
-        ExecutableFlow flow = (ExecutableFlow) event.getRunner();
+      synchronized (TriggerManager.this.syncObj) {
+        final ExecutableFlow flow = (ExecutableFlow) event.getRunner();
         if (event.getType() == Type.FLOW_FINISHED) {
           logger.info("Flow finish event received. " + flow.getExecutionId());
-          runnerThread.addJustFinishedFlow(flow);
+          TriggerManager.this.runnerThread.addJustFinishedFlow(flow);
         }
       }
     }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerAdapter.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
index 64207fd..d4aa51a 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerAdapter.java
@@ -50,6 +50,7 @@ public interface TriggerManagerAdapter {
   public TriggerJMX getJMX();
 
   public interface TriggerJMX {
+
     public long getLastRunnerThreadCheckTime();
 
     public boolean isRunnerThreadActive();
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerException.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerException.java
index 741a2c4..9799a60 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerManagerException.java
@@ -17,17 +17,18 @@
 package azkaban.trigger;
 
 public class TriggerManagerException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public TriggerManagerException(String message) {
+  public TriggerManagerException(final String message) {
     super(message);
   }
 
-  public TriggerManagerException(String message, Throwable cause) {
+  public TriggerManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public TriggerManagerException(Throwable e) {
+  public TriggerManagerException(final Throwable e) {
     super(e);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/trigger/TriggerStatus.java b/azkaban-common/src/main/java/azkaban/trigger/TriggerStatus.java
index d213ebb..3c5e62b 100644
--- a/azkaban-common/src/main/java/azkaban/trigger/TriggerStatus.java
+++ b/azkaban-common/src/main/java/azkaban/trigger/TriggerStatus.java
@@ -21,11 +21,11 @@ public enum TriggerStatus {
 
   private final int numVal;
 
-  TriggerStatus(int numVal) {
+  TriggerStatus(final int numVal) {
     this.numVal = numVal;
   }
 
   public int getNumVal() {
-    return numVal;
+    return this.numVal;
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/user/Permission.java b/azkaban-common/src/main/java/azkaban/user/Permission.java
index 8e16ac4..834a2c1 100644
--- a/azkaban-common/src/main/java/azkaban/user/Permission.java
+++ b/azkaban-common/src/main/java/azkaban/user/Permission.java
@@ -16,52 +16,32 @@
 
 package azkaban.user;
 
+import azkaban.utils.Utils;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.Set;
 
-import azkaban.utils.Utils;
-
 public class Permission {
-  public enum Type {
-    READ(0x0000001),
-    WRITE(0x0000002),
-    EXECUTE(0x0000004),
-    SCHEDULE(0x0000008),
-    METRICS(0x0000010),
-    CREATEPROJECTS(0x40000000), // Only used for roles
-    ADMIN(0x8000000);
 
-    private final int numVal;
-
-    Type(int numVal) {
-      this.numVal = numVal;
-    }
-
-    public int getFlag() {
-      return numVal;
-    }
-  }
-
-  private Set<Type> permissions = new HashSet<Type>();
+  private final Set<Type> permissions = new HashSet<>();
 
   public Permission() {
   }
 
-  public Permission(int flags) {
+  public Permission(final int flags) {
     setPermissions(flags);
   }
 
-  public Permission(Type... list) {
+  public Permission(final Type... list) {
     addPermission(list);
   }
 
-  public void addPermissions(Permission perm) {
+  public void addPermissions(final Permission perm) {
     this.permissions.addAll(perm.getTypes());
   }
 
-  public void setPermission(Type type, boolean set) {
+  public void setPermission(final Type type, final boolean set) {
     if (set) {
       addPermission(type);
     } else {
@@ -69,12 +49,12 @@ public class Permission {
     }
   }
 
-  public void setPermissions(int flags) {
-    permissions.clear();
+  public void setPermissions(final int flags) {
+    this.permissions.clear();
     if ((flags & Type.ADMIN.getFlag()) != 0) {
       addPermission(Type.ADMIN);
     } else {
-      for (Type type : Type.values()) {
+      for (final Type type : Type.values()) {
         if ((flags & type.getFlag()) != 0) {
           addPermission(type);
         }
@@ -82,79 +62,76 @@ public class Permission {
     }
   }
 
-  public void addPermission(Type... list) {
+  public void addPermission(final Type... list) {
     // Admin is all encompassing permission. No need to add other types
-    if (!permissions.contains(Type.ADMIN)) {
-      for (Type perm : list) {
-        permissions.add(perm);
+    if (!this.permissions.contains(Type.ADMIN)) {
+      for (final Type perm : list) {
+        this.permissions.add(perm);
       }
       // We add everything, and if there's Admin left, we make sure that only
       // Admin is remaining.
-      if (permissions.contains(Type.ADMIN)) {
-        permissions.clear();
-        permissions.add(Type.ADMIN);
+      if (this.permissions.contains(Type.ADMIN)) {
+        this.permissions.clear();
+        this.permissions.add(Type.ADMIN);
       }
     }
   }
 
-  public void addPermissionsByName(String... list) {
-    for (String perm : list) {
-      Type type = Type.valueOf(perm);
+  public void addPermissionsByName(final String... list) {
+    for (final String perm : list) {
+      final Type type = Type.valueOf(perm);
       if (type != null) {
         addPermission(type);
       }
-      ;
     }
   }
 
-  public void addPermissions(Collection<Type> list) {
-    for (Type perm : list) {
+  public void addPermissions(final Collection<Type> list) {
+    for (final Type perm : list) {
       addPermission(perm);
     }
   }
 
-  public void addPermissionsByName(Collection<String> list) {
-    for (String perm : list) {
-      Type type = Type.valueOf(perm);
+  public void addPermissionsByName(final Collection<String> list) {
+    for (final String perm : list) {
+      final Type type = Type.valueOf(perm);
       if (type != null) {
         addPermission(type);
       }
-      ;
     }
   }
 
   public Set<Type> getTypes() {
-    return permissions;
+    return this.permissions;
   }
 
-  public void removePermissions(Type... list) {
-    for (Type perm : list) {
-      permissions.remove(perm);
+  public void removePermissions(final Type... list) {
+    for (final Type perm : list) {
+      this.permissions.remove(perm);
     }
   }
 
-  public void removePermissionsByName(String... list) {
-    for (String perm : list) {
-      Type type = Type.valueOf(perm);
+  public void removePermissionsByName(final String... list) {
+    for (final String perm : list) {
+      final Type type = Type.valueOf(perm);
       if (type != null) {
-        permissions.remove(type);
+        this.permissions.remove(type);
       }
-      ;
     }
   }
 
-  public boolean isPermissionSet(Type permission) {
-    return permissions.contains(permission);
+  public boolean isPermissionSet(final Type permission) {
+    return this.permissions.contains(permission);
   }
 
-  public boolean isPermissionNameSet(String permission) {
-    return permissions.contains(Type.valueOf(permission));
+  public boolean isPermissionNameSet(final String permission) {
+    return this.permissions.contains(Type.valueOf(permission));
   }
 
   public String[] toStringArray() {
-    ArrayList<String> list = new ArrayList<String>();
+    final ArrayList<String> list = new ArrayList<>();
     int count = 0;
-    for (Type type : permissions) {
+    for (final Type type : this.permissions) {
       list.add(type.toString());
       count++;
     }
@@ -164,7 +141,7 @@ public class Permission {
 
   @Override
   public String toString() {
-    return Utils.flattenToString(permissions, ",");
+    return Utils.flattenToString(this.permissions, ",");
   }
 
   @Override
@@ -172,33 +149,58 @@ public class Permission {
     final int prime = 31;
     int result = 1;
     result =
-        prime * result + ((permissions == null) ? 0 : permissions.hashCode());
+        prime * result + ((this.permissions == null) ? 0 : this.permissions.hashCode());
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (getClass() != obj.getClass())
+    }
+    if (getClass() != obj.getClass()) {
       return false;
-    Permission other = (Permission) obj;
-    if (permissions == null) {
-      if (other.permissions != null)
+    }
+    final Permission other = (Permission) obj;
+    if (this.permissions == null) {
+      if (other.permissions != null) {
         return false;
-    } else if (!permissions.equals(other.permissions))
+      }
+    } else if (!this.permissions.equals(other.permissions)) {
       return false;
+    }
     return true;
   }
 
   public int toFlags() {
     int flag = 0;
-    for (Type type : permissions) {
+    for (final Type type : this.permissions) {
       flag |= type.getFlag();
     }
 
     return flag;
   }
+
+  public enum Type {
+    READ(0x0000001),
+    WRITE(0x0000002),
+    EXECUTE(0x0000004),
+    SCHEDULE(0x0000008),
+    METRICS(0x0000010),
+    CREATEPROJECTS(0x40000000), // Only used for roles
+    ADMIN(0x8000000);
+
+    private final int numVal;
+
+    Type(final int numVal) {
+      this.numVal = numVal;
+    }
+
+    public int getFlag() {
+      return this.numVal;
+    }
+  }
 }
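
A minimal standalone sketch of how the Permission bit flags above compose and round-trip through an int (illustrative only, not part of this diff; the printed values follow from the flag constants in the Type enum):

    import azkaban.user.Permission;
    import azkaban.user.Permission.Type;

    public class PermissionFlagDemo {

      public static void main(final String[] args) {
        final Permission perm = new Permission(Type.READ, Type.EXECUTE);
        final int flags = perm.toFlags();
        System.out.println(Integer.toHexString(flags));         // 5 (READ 0x1 | EXECUTE 0x4)

        // The int constructor calls setPermissions(flags) and rebuilds the same set.
        final Permission copy = new Permission(flags);
        System.out.println(copy.isPermissionSet(Type.EXECUTE)); // true

        // ADMIN is all-encompassing: addPermission() collapses the set to ADMIN only.
        copy.addPermission(Type.ADMIN);
        System.out.println(copy.getTypes());                    // [ADMIN]
      }
    }
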
diff --git a/azkaban-common/src/main/java/azkaban/user/Role.java b/azkaban-common/src/main/java/azkaban/user/Role.java
index 683595d..41792a3 100644
--- a/azkaban-common/src/main/java/azkaban/user/Role.java
+++ b/azkaban-common/src/main/java/azkaban/user/Role.java
@@ -17,24 +17,25 @@
 package azkaban.user;
 
 public class Role {
+
   private final String name;
   private final Permission globalPermission;
 
-  public Role(String name, Permission permission) {
+  public Role(final String name, final Permission permission) {
     this.name = name;
     this.globalPermission = permission;
   }
 
   public Permission getPermission() {
-    return globalPermission;
+    return this.globalPermission;
   }
 
   public String getName() {
-    return name;
+    return this.name;
   }
 
   @Override
   public String toString() {
-    return "Role " + name;
+    return "Role " + this.name;
   }
 }
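
Role stays a simple immutable pairing of a name and a Permission; a tiny usage sketch (illustrative only, with made-up values):

    import azkaban.user.Permission;
    import azkaban.user.Permission.Type;
    import azkaban.user.Role;

    public class RoleDemo {

      public static void main(final String[] args) {
        final Role admin = new Role("admin", new Permission(Type.ADMIN));
        System.out.println(admin);                                             // Role admin
        System.out.println(admin.getPermission().isPermissionSet(Type.ADMIN)); // true
      }
    }
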
diff --git a/azkaban-common/src/main/java/azkaban/user/User.java b/azkaban-common/src/main/java/azkaban/user/User.java
index e844662..d4bfb10 100644
--- a/azkaban-common/src/main/java/azkaban/user/User.java
+++ b/azkaban-common/src/main/java/azkaban/user/User.java
@@ -23,137 +23,144 @@ import java.util.List;
 import java.util.Set;
 
 public class User {
+
   private final String userid;
+  private final Set<String> roles = new HashSet<>();
+  private final Set<String> groups = new HashSet<>();
+  private final HashMap<String, String> properties = new HashMap<>();
   private String email = "";
-  private Set<String> roles = new HashSet<String>();
-  private Set<String> groups = new HashSet<String>();
   private UserPermissions userPermissions;
 
-  private HashMap<String, String> properties = new HashMap<String, String>();
-
-  public User(String userid) {
+  public User(final String userid) {
     this.userid = userid;
   }
 
   public String getUserId() {
-    return userid;
-  }
-
-  public void setEmail(String email) {
-    this.email = email;
+    return this.userid;
   }
 
   public String getEmail() {
-    return email;
+    return this.email;
   }
 
-  public void setPermissions(UserPermissions checker) {
-    this.userPermissions = checker;
+  public void setEmail(final String email) {
+    this.email = email;
   }
 
   public UserPermissions getPermissions() {
-    return userPermissions;
+    return this.userPermissions;
+  }
+
+  public void setPermissions(final UserPermissions checker) {
+    this.userPermissions = checker;
   }
 
-  public boolean hasPermission(String permission) {
-    if (userPermissions == null) {
+  public boolean hasPermission(final String permission) {
+    if (this.userPermissions == null) {
       return false;
     }
     return this.userPermissions.hasPermission(permission);
   }
 
   public List<String> getGroups() {
-    return new ArrayList<String>(groups);
+    return new ArrayList<>(this.groups);
   }
 
   public void clearGroup() {
-    groups.clear();
+    this.groups.clear();
   }
 
-  public void addGroup(String name) {
-    groups.add(name);
+  public void addGroup(final String name) {
+    this.groups.add(name);
   }
 
-  public boolean isInGroup(String group) {
+  public boolean isInGroup(final String group) {
     return this.groups.contains(group);
   }
 
   public List<String> getRoles() {
-    return new ArrayList<String>(roles);
+    return new ArrayList<>(this.roles);
   }
 
-  public void addRole(String role) {
+  public void addRole(final String role) {
     this.roles.add(role);
   }
 
-  public boolean hasRole(String role) {
-    return roles.contains(role);
+  public boolean hasRole(final String role) {
+    return this.roles.contains(role);
   }
 
-  public String getProperty(String name) {
-    return properties.get(name);
+  public String getProperty(final String name) {
+    return this.properties.get(name);
   }
 
   @Override
   public String toString() {
     String groupStr = "[";
-    for (String group : groups) {
+    for (final String group : this.groups) {
       groupStr += group + ",";
     }
     groupStr += "]";
-    return userid + ": " + groupStr;
+    return this.userid + ": " + groupStr;
   }
 
   @Override
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((userid == null) ? 0 : userid.hashCode());
+    result = prime * result + ((this.userid == null) ? 0 : this.userid.hashCode());
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (getClass() != obj.getClass())
+    }
+    if (getClass() != obj.getClass()) {
       return false;
-    User other = (User) obj;
-    if (userid == null) {
-      if (other.userid != null)
+    }
+    final User other = (User) obj;
+    if (this.userid == null) {
+      if (other.userid != null) {
         return false;
-    } else if (!userid.equals(other.userid))
+      }
+    } else if (!this.userid.equals(other.userid)) {
       return false;
+    }
     return true;
   }
 
   public static interface UserPermissions {
+
     public boolean hasPermission(String permission);
 
     public void addPermission(String permission);
   }
 
   public static class DefaultUserPermission implements UserPermissions {
+
     Set<String> permissions;
 
     public DefaultUserPermission() {
-      this(new HashSet<String>());
+      this(new HashSet<>());
     }
 
-    public DefaultUserPermission(Set<String> permissions) {
+    public DefaultUserPermission(final Set<String> permissions) {
       this.permissions = permissions;
     }
 
     @Override
-    public boolean hasPermission(String permission) {
-      return permissions.contains(permission);
+    public boolean hasPermission(final String permission) {
+      return this.permissions.contains(permission);
     }
 
     @Override
-    public void addPermission(String permission) {
-      permissions.add(permission);
+    public void addPermission(final String permission) {
+      this.permissions.add(permission);
     }
   }
 }
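
A short sketch of the User/UserPermissions wiring refactored above (illustrative only; the user id, role, group, and permission names are made up):

    import azkaban.user.User;
    import azkaban.user.User.DefaultUserPermission;

    public class UserDemo {

      public static void main(final String[] args) {
        final User user = new User("alice");
        user.addRole("metrics");
        user.addGroup("data-eng");

        final DefaultUserPermission perms = new DefaultUserPermission();
        perms.addPermission("READ");
        user.setPermissions(perms);

        // hasPermission() delegates to the UserPermissions checker and
        // returns false when no checker has been set.
        System.out.println(user.hasRole("metrics"));    // true
        System.out.println(user.isInGroup("data-eng")); // true
        System.out.println(user.hasPermission("READ")); // true
      }
    }
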
diff --git a/azkaban-common/src/main/java/azkaban/user/UserManager.java b/azkaban-common/src/main/java/azkaban/user/UserManager.java
index 254cfe5..5556f0a 100644
--- a/azkaban-common/src/main/java/azkaban/user/UserManager.java
+++ b/azkaban-common/src/main/java/azkaban/user/UserManager.java
@@ -25,14 +25,11 @@ package azkaban.user;
  * instantiation may fail.
  */
 public interface UserManager {
+
   /**
    * Retrieves the user given the username and password to authenticate against.
    *
-   * @param username
-   * @param password
-   * @return
-   * @throws UserManagerException If the username/password combination doesn't
-   *           exist.
+   * @throws UserManagerException If the username/password combination doesn't exist.
    */
   public User getUser(String username, String password)
       throws UserManagerException;
@@ -40,26 +37,17 @@ public interface UserManager {
   /**
    * Returns true if the user is valid. This is used when adding permissions for
    * users
-   *
-   * @param username
-   * @return
    */
   public boolean validateUser(String username);
 
   /**
    * Returns true if the group is valid. This is used when adding permissions
    * for groups.
-   *
-   * @param group
-   * @return
    */
   public boolean validateGroup(String group);
 
   /**
    * Returns the user role. This may return null.
-   *
-   * @param roleName
-   * @return
    */
   public Role getRole(String roleName);
 
diff --git a/azkaban-common/src/main/java/azkaban/user/UserManagerException.java b/azkaban-common/src/main/java/azkaban/user/UserManagerException.java
index a27d07b..932f5ae 100644
--- a/azkaban-common/src/main/java/azkaban/user/UserManagerException.java
+++ b/azkaban-common/src/main/java/azkaban/user/UserManagerException.java
@@ -18,16 +18,16 @@ package azkaban.user;
 
 /**
  * Exception for the UserManager to capture login errors.
- *
  */
 public class UserManagerException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public UserManagerException(String message) {
+  public UserManagerException(final String message) {
     super(message);
   }
 
-  public UserManagerException(String message, Throwable cause) {
+  public UserManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 }
diff --git a/azkaban-common/src/main/java/azkaban/user/XmlUserManager.java b/azkaban-common/src/main/java/azkaban/user/XmlUserManager.java
index b3c00e2..007865d 100644
--- a/azkaban-common/src/main/java/azkaban/user/XmlUserManager.java
+++ b/azkaban-common/src/main/java/azkaban/user/XmlUserManager.java
@@ -16,28 +16,23 @@
 
 package azkaban.user;
 
+import azkaban.user.User.UserPermissions;
+import azkaban.utils.Props;
 import java.io.File;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;
-
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
-
 import org.apache.log4j.Logger;
-
 import org.w3c.dom.Document;
 import org.w3c.dom.NamedNodeMap;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
-
 import org.xml.sax.SAXException;
 
-import azkaban.user.User.UserPermissions;
-import azkaban.utils.Props;
-
 /**
  * Xml implementation of the UserManager. Looks for the property
  * user.manager.xml.file in the azkaban properties.
@@ -47,8 +42,6 @@ import azkaban.utils.Props;
  * </azkaban-users>
  */
 public class XmlUserManager implements UserManager {
-  private static final Logger logger = Logger.getLogger(XmlUserManager.class
-      .getName());
 
   public static final String XML_FILE_PARAM = "user.manager.xml.file";
   public static final String AZKABAN_USERS_TAG = "azkaban-users";
@@ -64,8 +57,9 @@ public class XmlUserManager implements UserManager {
   public static final String PROXY_ATTR = "proxy";
   public static final String GROUPS_ATTR = "groups";
   public static final String GROUPNAME_ATTR = "name";
-
-  private String xmlPath;
+  private static final Logger logger = Logger.getLogger(XmlUserManager.class
+      .getName());
+  private final String xmlPath;
 
   private HashMap<String, User> users;
   private HashMap<String, String> userPassword;
@@ -75,37 +69,35 @@ public class XmlUserManager implements UserManager {
 
   /**
    * The constructor.
-   *
-   * @param props
    */
-  public XmlUserManager(Props props) {
-    xmlPath = props.getString(XML_FILE_PARAM);
+  public XmlUserManager(final Props props) {
+    this.xmlPath = props.getString(XML_FILE_PARAM);
 
     parseXMLFile();
   }
 
   private void parseXMLFile() {
-    File file = new File(xmlPath);
+    final File file = new File(this.xmlPath);
     if (!file.exists()) {
-      throw new IllegalArgumentException("User xml file " + xmlPath
+      throw new IllegalArgumentException("User xml file " + this.xmlPath
           + " doesn't exist.");
     }
 
-    HashMap<String, User> users = new HashMap<String, User>();
-    HashMap<String, String> userPassword = new HashMap<String, String>();
-    HashMap<String, Role> roles = new HashMap<String, Role>();
-    HashMap<String, Set<String>> groupRoles =
-        new HashMap<String, Set<String>>();
-    HashMap<String, Set<String>> proxyUserMap =
-        new HashMap<String, Set<String>>();
+    final HashMap<String, User> users = new HashMap<>();
+    final HashMap<String, String> userPassword = new HashMap<>();
+    final HashMap<String, Role> roles = new HashMap<>();
+    final HashMap<String, Set<String>> groupRoles =
+        new HashMap<>();
+    final HashMap<String, Set<String>> proxyUserMap =
+        new HashMap<>();
 
     // Creating the document builder to parse xml.
-    DocumentBuilderFactory docBuilderFactory =
+    final DocumentBuilderFactory docBuilderFactory =
         DocumentBuilderFactory.newInstance();
     DocumentBuilder builder = null;
     try {
       builder = docBuilderFactory.newDocumentBuilder();
-    } catch (ParserConfigurationException e) {
+    } catch (final ParserConfigurationException e) {
       throw new IllegalArgumentException(
           "Exception while parsing user xml. Document builder not created.", e);
     }
@@ -113,22 +105,22 @@ public class XmlUserManager implements UserManager {
     Document doc = null;
     try {
       doc = builder.parse(file);
-    } catch (SAXException e) {
-      throw new IllegalArgumentException("Exception while parsing " + xmlPath
+    } catch (final SAXException e) {
+      throw new IllegalArgumentException("Exception while parsing " + this.xmlPath
           + ". Invalid XML.", e);
-    } catch (IOException e) {
-      throw new IllegalArgumentException("Exception while parsing " + xmlPath
+    } catch (final IOException e) {
+      throw new IllegalArgumentException("Exception while parsing " + this.xmlPath
           + ". Error reading file.", e);
     }
 
     // Only look at first item, because we should only be seeing
     // azkaban-users tag.
-    NodeList tagList = doc.getChildNodes();
-    Node azkabanUsers = tagList.item(0);
+    final NodeList tagList = doc.getChildNodes();
+    final Node azkabanUsers = tagList.item(0);
 
-    NodeList azkabanUsersList = azkabanUsers.getChildNodes();
+    final NodeList azkabanUsersList = azkabanUsers.getChildNodes();
     for (int i = 0; i < azkabanUsersList.getLength(); ++i) {
-      Node node = azkabanUsersList.item(i);
+      final Node node = azkabanUsersList.item(i);
       if (node.getNodeType() == Node.ELEMENT_NODE) {
         if (node.getNodeName().equals(USER_TAG)) {
           parseUserTag(node, users, userPassword, proxyUserMap);
@@ -150,48 +142,48 @@ public class XmlUserManager implements UserManager {
     }
   }
 
-  private void parseUserTag(Node node, HashMap<String, User> users,
-      HashMap<String, String> userPassword,
-      HashMap<String, Set<String>> proxyUserMap) {
-    NamedNodeMap userAttrMap = node.getAttributes();
-    Node userNameAttr = userAttrMap.getNamedItem(USERNAME_ATTR);
+  private void parseUserTag(final Node node, final HashMap<String, User> users,
+      final HashMap<String, String> userPassword,
+      final HashMap<String, Set<String>> proxyUserMap) {
+    final NamedNodeMap userAttrMap = node.getAttributes();
+    final Node userNameAttr = userAttrMap.getNamedItem(USERNAME_ATTR);
     if (userNameAttr == null) {
       throw new RuntimeException("Error loading user. The '" + USERNAME_ATTR
           + "' attribute doesn't exist");
     }
 
-    Node passwordAttr = userAttrMap.getNamedItem(PASSWORD_ATTR);
+    final Node passwordAttr = userAttrMap.getNamedItem(PASSWORD_ATTR);
     if (passwordAttr == null) {
       throw new RuntimeException("Error loading user. The '" + PASSWORD_ATTR
           + "' attribute doesn't exist");
     }
 
     // Add user to the user/password map
-    String username = userNameAttr.getNodeValue();
-    String password = passwordAttr.getNodeValue();
+    final String username = userNameAttr.getNodeValue();
+    final String password = passwordAttr.getNodeValue();
     userPassword.put(username, password);
     // Add the user to the node
-    User user = new User(userNameAttr.getNodeValue());
+    final User user = new User(userNameAttr.getNodeValue());
     users.put(username, user);
     logger.info("Loading user " + user.getUserId());
 
-    Node roles = userAttrMap.getNamedItem(ROLES_ATTR);
+    final Node roles = userAttrMap.getNamedItem(ROLES_ATTR);
     if (roles != null) {
-      String value = roles.getNodeValue();
-      String[] roleSplit = value.split("\\s*,\\s*");
-      for (String role : roleSplit) {
+      final String value = roles.getNodeValue();
+      final String[] roleSplit = value.split("\\s*,\\s*");
+      for (final String role : roleSplit) {
         user.addRole(role);
       }
     }
 
-    Node proxy = userAttrMap.getNamedItem(PROXY_ATTR);
+    final Node proxy = userAttrMap.getNamedItem(PROXY_ATTR);
     if (proxy != null) {
-      String value = proxy.getNodeValue();
-      String[] proxySplit = value.split("\\s*,\\s*");
-      for (String proxyUser : proxySplit) {
+      final String value = proxy.getNodeValue();
+      final String[] proxySplit = value.split("\\s*,\\s*");
+      for (final String proxyUser : proxySplit) {
         Set<String> proxySet = proxyUserMap.get(username);
         if (proxySet == null) {
-          proxySet = new HashSet<String>();
+          proxySet = new HashSet<>();
           proxyUserMap.put(username, proxySet);
         }
 
@@ -199,56 +191,56 @@ public class XmlUserManager implements UserManager {
       }
     }
 
-    Node groups = userAttrMap.getNamedItem(GROUPS_ATTR);
+    final Node groups = userAttrMap.getNamedItem(GROUPS_ATTR);
     if (groups != null) {
-      String value = groups.getNodeValue();
-      String[] groupSplit = value.split("\\s*,\\s*");
-      for (String group : groupSplit) {
+      final String value = groups.getNodeValue();
+      final String[] groupSplit = value.split("\\s*,\\s*");
+      for (final String group : groupSplit) {
         user.addGroup(group);
       }
     }
 
-    Node emailAttr = userAttrMap.getNamedItem(EMAIL_ATTR);
+    final Node emailAttr = userAttrMap.getNamedItem(EMAIL_ATTR);
     if (emailAttr != null) {
       user.setEmail(emailAttr.getNodeValue());
     }
   }
 
-  private void parseRoleTag(Node node, HashMap<String, Role> roles) {
-    NamedNodeMap roleAttrMap = node.getAttributes();
-    Node roleNameAttr = roleAttrMap.getNamedItem(ROLENAME_ATTR);
+  private void parseRoleTag(final Node node, final HashMap<String, Role> roles) {
+    final NamedNodeMap roleAttrMap = node.getAttributes();
+    final Node roleNameAttr = roleAttrMap.getNamedItem(ROLENAME_ATTR);
     if (roleNameAttr == null) {
       throw new RuntimeException(
           "Error loading role. The role 'name' attribute doesn't exist");
     }
-    Node permissionAttr = roleAttrMap.getNamedItem(ROLEPERMISSIONS_ATTR);
+    final Node permissionAttr = roleAttrMap.getNamedItem(ROLEPERMISSIONS_ATTR);
     if (permissionAttr == null) {
       throw new RuntimeException(
           "Error loading role. The role 'permissions' attribute doesn't exist");
     }
 
-    String roleName = roleNameAttr.getNodeValue();
-    String permissions = permissionAttr.getNodeValue();
+    final String roleName = roleNameAttr.getNodeValue();
+    final String permissions = permissionAttr.getNodeValue();
 
-    String[] permissionSplit = permissions.split("\\s*,\\s*");
+    final String[] permissionSplit = permissions.split("\\s*,\\s*");
 
-    Permission perm = new Permission();
-    for (String permString : permissionSplit) {
+    final Permission perm = new Permission();
+    for (final String permString : permissionSplit) {
       try {
-        Permission.Type type = Permission.Type.valueOf(permString);
+        final Permission.Type type = Permission.Type.valueOf(permString);
         perm.addPermission(type);
-      } catch (IllegalArgumentException e) {
+      } catch (final IllegalArgumentException e) {
         logger.error("Error adding type " + permString
             + ". Permission doesn't exist.", e);
       }
     }
 
-    Role role = new Role(roleName, perm);
+    final Role role = new Role(roleName, perm);
     roles.put(roleName, role);
   }
 
   @Override
-  public User getUser(String username, String password)
+  public User getUser(final String username, final String password)
       throws UserManagerException {
     if (username == null || username.trim().isEmpty()) {
       throw new UserManagerException("Username is empty.");
@@ -261,9 +253,9 @@ public class XmlUserManager implements UserManager {
     String foundPassword = null;
     User user = null;
     synchronized (this) {
-      foundPassword = userPassword.get(username);
+      foundPassword = this.userPassword.get(username);
       if (foundPassword != null) {
-        user = users.get(username);
+        user = this.users.get(username);
       }
     }
 
@@ -283,45 +275,45 @@ public class XmlUserManager implements UserManager {
     resolveGroupRoles(user);
     user.setPermissions(new UserPermissions() {
       @Override
-      public boolean hasPermission(String permission) {
+      public boolean hasPermission(final String permission) {
         return true;
       }
 
       @Override
-      public void addPermission(String permission) {
+      public void addPermission(final String permission) {
       }
     });
     return user;
   }
 
-  private void resolveGroupRoles(User user) {
-    for (String group : user.getGroups()) {
-      Set<String> groupRoleSet = groupRoles.get(group);
+  private void resolveGroupRoles(final User user) {
+    for (final String group : user.getGroups()) {
+      final Set<String> groupRoleSet = this.groupRoles.get(group);
       if (groupRoleSet != null) {
-        for (String role : groupRoleSet) {
+        for (final String role : groupRoleSet) {
           user.addRole(role);
         }
       }
     }
   }
 
-  private void parseGroupRoleTag(Node node,
-      HashMap<String, Set<String>> groupRoles) {
-    NamedNodeMap groupAttrMap = node.getAttributes();
-    Node groupNameAttr = groupAttrMap.getNamedItem(GROUPNAME_ATTR);
+  private void parseGroupRoleTag(final Node node,
+      final HashMap<String, Set<String>> groupRoles) {
+    final NamedNodeMap groupAttrMap = node.getAttributes();
+    final Node groupNameAttr = groupAttrMap.getNamedItem(GROUPNAME_ATTR);
     if (groupNameAttr == null) {
       throw new RuntimeException(
           "Error loading role. The role 'name' attribute doesn't exist");
     }
 
-    String groupName = groupNameAttr.getNodeValue();
-    Set<String> roleSet = new HashSet<String>();
+    final String groupName = groupNameAttr.getNodeValue();
+    final Set<String> roleSet = new HashSet<>();
 
-    Node roles = groupAttrMap.getNamedItem(ROLES_ATTR);
+    final Node roles = groupAttrMap.getNamedItem(ROLES_ATTR);
     if (roles != null) {
-      String value = roles.getNodeValue();
-      String[] roleSplit = value.split("\\s*,\\s*");
-      for (String role : roleSplit) {
+      final String value = roles.getNodeValue();
+      final String[] roleSplit = value.split("\\s*,\\s*");
+      for (final String role : roleSplit) {
         roleSet.add(role);
       }
     }
@@ -331,25 +323,25 @@ public class XmlUserManager implements UserManager {
   }
 
   @Override
-  public boolean validateUser(String username) {
-    return users.containsKey(username);
+  public boolean validateUser(final String username) {
+    return this.users.containsKey(username);
   }
 
   @Override
-  public Role getRole(String roleName) {
-    return roles.get(roleName);
+  public Role getRole(final String roleName) {
+    return this.roles.get(roleName);
   }
 
   @Override
-  public boolean validateGroup(String group) {
+  public boolean validateGroup(final String group) {
     // Return true. Validation should be added when groups are added to the xml.
     return true;
   }
 
   @Override
-  public boolean validateProxyUser(String proxyUser, User realUser) {
-    if (proxyUserMap.containsKey(realUser.getUserId())
-        && proxyUserMap.get(realUser.getUserId()).contains(proxyUser)) {
+  public boolean validateProxyUser(final String proxyUser, final User realUser) {
+    if (this.proxyUserMap.containsKey(realUser.getUserId())
+        && this.proxyUserMap.get(realUser.getUserId()).contains(proxyUser)) {
       return true;
     } else {
       return false;
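
A sketch of loading the XML-backed user manager above (illustrative only; the file path is a placeholder, and Props.put(key, value) is assumed to behave as it does elsewhere in the codebase):

    import azkaban.user.User;
    import azkaban.user.UserManagerException;
    import azkaban.user.XmlUserManager;
    import azkaban.utils.Props;

    public class XmlUserManagerDemo {

      public static void main(final String[] args) throws UserManagerException {
        final Props props = new Props();
        props.put(XmlUserManager.XML_FILE_PARAM, "conf/azkaban-users.xml"); // placeholder path

        // The constructor parses the XML immediately and throws
        // IllegalArgumentException if the file is missing or malformed.
        final XmlUserManager manager = new XmlUserManager(props);

        // getUser() throws UserManagerException for an unknown username/password pair.
        final User user = manager.getUser("admin", "admin-password"); // placeholder credentials
        System.out.println(user.getRoles());
      }
    }
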
diff --git a/azkaban-common/src/main/java/azkaban/utils/AbstractMailer.java b/azkaban-common/src/main/java/azkaban/utils/AbstractMailer.java
index 45ac64f..ee2ef1a 100644
--- a/azkaban-common/src/main/java/azkaban/utils/AbstractMailer.java
+++ b/azkaban-common/src/main/java/azkaban/utils/AbstractMailer.java
@@ -19,37 +19,37 @@ package azkaban.utils;
 import java.util.Collection;
 
 public class AbstractMailer {
-  private static int MB_IN_BYTES = 1048576;
-  public static final int DEFAULT_SMTP_PORT = 25;
 
-  private String clientHostname;
-  private int clientPort;
-  private boolean usesSSL;
-  private boolean usesAuth;
+  public static final int DEFAULT_SMTP_PORT = 25;
+  private static final int MB_IN_BYTES = 1048576;
+  private final String clientHostname;
+  private final int clientPort;
+  private final boolean usesSSL;
+  private final boolean usesAuth;
 
-  private String mailHost;
-  private int mailPort;
-  private String mailUser;
-  private String mailPassword;
-  private String mailSender;
-  private String azkabanName;
-  private String tls;
+  private final String mailHost;
+  private final int mailPort;
+  private final String mailUser;
+  private final String mailPassword;
+  private final String mailSender;
+  private final String azkabanName;
+  private final String tls;
 
-  private String referenceURL;
+  private final String referenceURL;
 
-  private long attachmentMazSizeInByte;
+  private final long attachmentMazSizeInByte;
 
-  public AbstractMailer(Props props) {
+  public AbstractMailer(final Props props) {
     this.azkabanName = props.getString("azkaban.name", "azkaban");
     this.mailHost = props.getString("mail.host", "localhost");
     this.mailPort = props.getInt("mail.port", DEFAULT_SMTP_PORT);
     this.mailUser = props.getString("mail.user", "");
     this.mailPassword = props.getString("mail.password", "");
     this.tls = props.getString("mail.tls", "false");
-    long maxAttachmentSizeInMB =
+    final long maxAttachmentSizeInMB =
         props.getInt("mail.max.attachment.size.mb", 100);
 
-    attachmentMazSizeInByte = maxAttachmentSizeInMB * MB_IN_BYTES;
+    this.attachmentMazSizeInByte = maxAttachmentSizeInMB * MB_IN_BYTES;
 
     this.mailSender = props.getString("mail.sender", "");
     this.usesAuth = props.getBoolean("mail.useAuth", true);
@@ -58,73 +58,72 @@ public class AbstractMailer {
     this.clientPort = props.getInt("server.port");
     this.usesSSL = props.getBoolean("server.useSSL");
 
-    if (usesSSL) {
-      referenceURL =
-          "https://" + clientHostname
-              + (clientPort == 443 ? "/" : ":" + clientPort + "/");
+    if (this.usesSSL) {
+      this.referenceURL =
+          "https://" + this.clientHostname
+              + (this.clientPort == 443 ? "/" : ":" + this.clientPort + "/");
     } else {
-      referenceURL =
-          "http://" + clientHostname
-              + (clientPort == 80 ? "/" : ":" + clientPort + "/");
+      this.referenceURL =
+          "http://" + this.clientHostname
+              + (this.clientPort == 80 ? "/" : ":" + this.clientPort + "/");
     }
   }
 
   public String getReferenceURL() {
-    return referenceURL;
+    return this.referenceURL;
   }
 
-  protected EmailMessage createEmailMessage(String subject, String mimetype,
-      Collection<String> emailList) {
-    EmailMessage message = new EmailMessage(mailHost, mailPort, mailUser, mailPassword);
-    message.setFromAddress(mailSender);
+  protected EmailMessage createEmailMessage(final String subject, final String mimetype,
+      final Collection<String> emailList) {
+    final EmailMessage message = new EmailMessage(this.mailHost, this.mailPort, this.mailUser,
+        this.mailPassword);
+    message.setFromAddress(this.mailSender);
     message.addAllToAddress(emailList);
     message.setMimeType(mimetype);
     message.setSubject(subject);
-    message.setAuth(usesAuth);
-    message.setTLS(tls);
+    message.setAuth(this.usesAuth);
+    message.setTLS(this.tls);
 
     return message;
   }
 
-  public EmailMessage prepareEmailMessage(String subject, String mimetype,
-      Collection<String> emailList) {
+  public EmailMessage prepareEmailMessage(final String subject, final String mimetype,
+      final Collection<String> emailList) {
     return createEmailMessage(subject, mimetype, emailList);
   }
 
   public String getAzkabanName() {
-    return azkabanName;
+    return this.azkabanName;
   }
 
   public String getMailHost() {
-    return mailHost;
+    return this.mailHost;
   }
 
   public String getMailUser() {
-    return mailUser;
+    return this.mailUser;
   }
 
   public String getMailPassword() {
-    return mailPassword;
+    return this.mailPassword;
   }
 
   public String getMailSender() {
-    return mailSender;
+    return this.mailSender;
   }
 
   public int getMailPort() {
-    return mailPort;
+    return this.mailPort;
   }
 
   /**
    * Attachment maximum size in bytes
-   * 
-   * @return
    */
   public long getAttachmentMaxSize() {
-    return attachmentMazSizeInByte;
+    return this.attachmentMazSizeInByte;
   }
 
   public boolean hasMailAuth() {
-    return usesAuth;
+    return this.usesAuth;
   }
 }
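
The reference-URL rule in the AbstractMailer constructor above, written out standalone for clarity (illustrative only; the host and ports are made-up values):

    public final class ReferenceUrlDemo {

      static String referenceUrl(final boolean usesSSL, final String host, final int port) {
        if (usesSSL) {
          // Port 443 is dropped from the URL, any other port is kept.
          return "https://" + host + (port == 443 ? "/" : ":" + port + "/");
        }
        // Port 80 is dropped from the URL, any other port is kept.
        return "http://" + host + (port == 80 ? "/" : ":" + port + "/");
      }

      public static void main(final String[] args) {
        System.out.println(referenceUrl(true, "azkaban.example.com", 443));   // https://azkaban.example.com/
        System.out.println(referenceUrl(false, "azkaban.example.com", 8081)); // http://azkaban.example.com:8081/
      }
    }
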
diff --git a/azkaban-common/src/main/java/azkaban/utils/CircularBuffer.java b/azkaban-common/src/main/java/azkaban/utils/CircularBuffer.java
index 8c08ea9..f0a2f80 100644
--- a/azkaban-common/src/main/java/azkaban/utils/CircularBuffer.java
+++ b/azkaban-common/src/main/java/azkaban/utils/CircularBuffer.java
@@ -16,13 +16,12 @@
 
 package azkaban.utils;
 
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterators;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import com.google.common.base.Joiner;
-import com.google.common.collect.Iterators;
-
 /**
  * A circular buffer of items of a given length. It will grow up to the give
  * size as items are appended, then it will begin to overwrite older items.
@@ -35,33 +34,34 @@ public class CircularBuffer<T> implements Iterable<T> {
   private final int size;
   private int start;
 
-  public CircularBuffer(int size) {
-    this.lines = new ArrayList<T>();
+  public CircularBuffer(final int size) {
+    this.lines = new ArrayList<>();
     this.size = size;
     this.start = 0;
   }
 
-  public void append(T line) {
-    if (lines.size() < size) {
-      lines.add(line);
+  public void append(final T line) {
+    if (this.lines.size() < this.size) {
+      this.lines.add(line);
     } else {
-      lines.set(start, line);
-      start = (start + 1) % size;
+      this.lines.set(this.start, line);
+      this.start = (this.start + 1) % this.size;
     }
   }
 
   @Override
   public String toString() {
-    return "[" + Joiner.on(", ").join(lines) + "]";
+    return "[" + Joiner.on(", ").join(this.lines) + "]";
   }
 
   @Override
   public Iterator<T> iterator() {
-    if (start == 0)
-      return lines.iterator();
-    else
-      return Iterators.concat(lines.subList(start, lines.size()).iterator(),
-          lines.subList(0, start).iterator());
+    if (this.start == 0) {
+      return this.lines.iterator();
+    } else {
+      return Iterators.concat(this.lines.subList(this.start, this.lines.size()).iterator(),
+          this.lines.subList(0, this.start).iterator());
+    }
   }
 
   public int getMaxSize() {
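
A quick sketch of the overwrite-and-wrap behaviour implemented by append() and iterator() above (illustrative only):

    import azkaban.utils.CircularBuffer;

    public class CircularBufferDemo {

      public static void main(final String[] args) {
        final CircularBuffer<String> buffer = new CircularBuffer<>(3);
        buffer.append("a");
        buffer.append("b");
        buffer.append("c");
        buffer.append("d"); // buffer is full, so "a" (the oldest item) is overwritten

        // The iterator stitches the two sub-lists back into insertion order
        // once the buffer has wrapped: prints b, c, d.
        for (final String line : buffer) {
          System.out.println(line);
        }
      }
    }
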
diff --git a/azkaban-common/src/main/java/azkaban/utils/Emailer.java b/azkaban-common/src/main/java/azkaban/utils/Emailer.java
index 8e7c710..1cf24ba 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Emailer.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Emailer.java
@@ -24,34 +24,29 @@ import azkaban.executor.Status;
 import azkaban.executor.mail.DefaultMailCreator;
 import azkaban.executor.mail.MailCreator;
 import azkaban.sla.SlaOption;
-import org.apache.log4j.Logger;
-
-import javax.mail.MessagingException;
 import java.util.ArrayList;
 import java.util.List;
+import javax.mail.MessagingException;
+import org.apache.log4j.Logger;
 
 public class Emailer extends AbstractMailer implements Alerter {
-  private static Logger logger = Logger.getLogger(Emailer.class);
 
   private static final String HTTPS = "https";
-
   private static final String HTTP = "http";
-
+  private static final Logger logger = Logger.getLogger(Emailer.class);
+  private final String scheme;
+  private final String clientHostname;
+  private final String clientPortNumber;
+  private final String mailHost;
+  private final int mailPort;
+  private final String mailUser;
+  private final String mailPassword;
+  private final String mailSender;
+  private final String azkabanName;
+  private final String tls;
   private boolean testMode = false;
 
-  private String scheme;
-  private String clientHostname;
-  private String clientPortNumber;
-
-  private String mailHost;
-  private int mailPort;
-  private String mailUser;
-  private String mailPassword;
-  private String mailSender;
-  private String azkabanName;
-  private String tls;
-
-  public Emailer(Props props) {
+  public Emailer(final Props props) {
     super(props);
     this.azkabanName = props.getString("azkaban.name", "azkaban");
     this.mailHost = props.getString("mail.host", "localhost");
@@ -61,9 +56,9 @@ public class Emailer extends AbstractMailer implements Alerter {
     this.mailSender = props.getString("mail.sender", "");
     this.tls = props.getString("mail.tls", "false");
 
-    int mailTimeout = props.getInt("mail.timeout.millis", 10000);
+    final int mailTimeout = props.getInt("mail.timeout.millis", 10000);
     EmailMessage.setTimeout(mailTimeout);
-    int connectionTimeout =
+    final int connectionTimeout =
         props.getInt("mail.connection.timeout.millis", 10000);
     EmailMessage.setConnectionTimeout(connectionTimeout);
 
@@ -79,138 +74,140 @@ public class Emailer extends AbstractMailer implements Alerter {
       this.clientPortNumber = props.getString("jetty.port");
     }
 
-    testMode = props.getBoolean("test.mode", false);
+    this.testMode = props.getBoolean("test.mode", false);
   }
 
-  @SuppressWarnings("unchecked")
-  private void sendSlaAlertEmail(SlaOption slaOption, String slaMessage) {
-    String subject = "Sla Violation Alert on " + getAzkabanName();
-    String body = slaMessage;
-    List<String> emailList =
+  public static List<String> findFailedJobs(final ExecutableFlow flow) {
+    final ArrayList<String> failedJobs = new ArrayList<>();
+    for (final ExecutableNode node : flow.getExecutableNodes()) {
+      if (node.getStatus() == Status.FAILED) {
+        failedJobs.add(node.getId());
+      }
+    }
+    return failedJobs;
+  }
+
+  private void sendSlaAlertEmail(final SlaOption slaOption, final String slaMessage) {
+    final String subject = "Sla Violation Alert on " + getAzkabanName();
+    final String body = slaMessage;
+    final List<String> emailList =
         (List<String>) slaOption.getInfo().get(SlaOption.INFO_EMAIL_LIST);
     if (emailList != null && !emailList.isEmpty()) {
-      EmailMessage message =
+      final EmailMessage message =
           super.createEmailMessage(subject, "text/html", emailList);
 
       message.setBody(body);
 
-      if (!testMode) {
+      if (!this.testMode) {
         try {
           message.sendEmail();
-        } catch (MessagingException e) {
+        } catch (final MessagingException e) {
           logger.error("Email message send failed", e);
         }
       }
     }
   }
 
-  public void sendFirstErrorMessage(ExecutableFlow flow) {
-    EmailMessage message = new EmailMessage(mailHost, mailPort, mailUser, mailPassword);
-    message.setFromAddress(mailSender);
-    message.setTLS(tls);
+  public void sendFirstErrorMessage(final ExecutableFlow flow) {
+    final EmailMessage message = new EmailMessage(this.mailHost, this.mailPort, this.mailUser,
+        this.mailPassword);
+    message.setFromAddress(this.mailSender);
+    message.setTLS(this.tls);
     message.setAuth(super.hasMailAuth());
 
-    ExecutionOptions option = flow.getExecutionOptions();
+    final ExecutionOptions option = flow.getExecutionOptions();
 
-    MailCreator mailCreator =
+    final MailCreator mailCreator =
         DefaultMailCreator.getCreator(option.getMailCreator());
 
     logger.debug("ExecutorMailer using mail creator:"
         + mailCreator.getClass().getCanonicalName());
 
-    boolean mailCreated =
-        mailCreator.createFirstErrorMessage(flow, message, azkabanName, scheme,
-            clientHostname, clientPortNumber);
+    final boolean mailCreated =
+        mailCreator.createFirstErrorMessage(flow, message, this.azkabanName, this.scheme,
+            this.clientHostname, this.clientPortNumber);
 
-    if (mailCreated && !testMode) {
+    if (mailCreated && !this.testMode) {
       try {
         message.sendEmail();
-      } catch (MessagingException e) {
+      } catch (final MessagingException e) {
         logger.error("Email message send failed", e);
       }
     }
   }
 
-  public void sendErrorEmail(ExecutableFlow flow, String... extraReasons) {
-    EmailMessage message = new EmailMessage(mailHost, mailPort, mailUser, mailPassword);
-    message.setFromAddress(mailSender);
-    message.setTLS(tls);
+  public void sendErrorEmail(final ExecutableFlow flow, final String... extraReasons) {
+    final EmailMessage message = new EmailMessage(this.mailHost, this.mailPort, this.mailUser,
+        this.mailPassword);
+    message.setFromAddress(this.mailSender);
+    message.setTLS(this.tls);
     message.setAuth(super.hasMailAuth());
 
-    ExecutionOptions option = flow.getExecutionOptions();
+    final ExecutionOptions option = flow.getExecutionOptions();
 
-    MailCreator mailCreator =
+    final MailCreator mailCreator =
         DefaultMailCreator.getCreator(option.getMailCreator());
     logger.debug("ExecutorMailer using mail creator:"
         + mailCreator.getClass().getCanonicalName());
 
-    boolean mailCreated =
-        mailCreator.createErrorEmail(flow, message, azkabanName, scheme,
-            clientHostname, clientPortNumber, extraReasons);
+    final boolean mailCreated =
+        mailCreator.createErrorEmail(flow, message, this.azkabanName, this.scheme,
+            this.clientHostname, this.clientPortNumber, extraReasons);
 
-    if (mailCreated && !testMode) {
+    if (mailCreated && !this.testMode) {
       try {
         message.sendEmail();
-      } catch (MessagingException e) {
+      } catch (final MessagingException e) {
         logger.error("Email message send failed", e);
       }
     }
   }
 
-  public void sendSuccessEmail(ExecutableFlow flow) {
-    EmailMessage message = new EmailMessage(mailHost, mailPort, mailUser, mailPassword);
-    message.setFromAddress(mailSender);
-    message.setTLS(tls);
+  public void sendSuccessEmail(final ExecutableFlow flow) {
+    final EmailMessage message = new EmailMessage(this.mailHost, this.mailPort, this.mailUser,
+        this.mailPassword);
+    message.setFromAddress(this.mailSender);
+    message.setTLS(this.tls);
     message.setAuth(super.hasMailAuth());
 
-    ExecutionOptions option = flow.getExecutionOptions();
+    final ExecutionOptions option = flow.getExecutionOptions();
 
-    MailCreator mailCreator =
+    final MailCreator mailCreator =
         DefaultMailCreator.getCreator(option.getMailCreator());
     logger.debug("ExecutorMailer using mail creator:"
         + mailCreator.getClass().getCanonicalName());
 
-    boolean mailCreated =
-        mailCreator.createSuccessEmail(flow, message, azkabanName, scheme,
-            clientHostname, clientPortNumber);
+    final boolean mailCreated =
+        mailCreator.createSuccessEmail(flow, message, this.azkabanName, this.scheme,
+            this.clientHostname, this.clientPortNumber);
 
-    if (mailCreated && !testMode) {
+    if (mailCreated && !this.testMode) {
       try {
         message.sendEmail();
-      } catch (MessagingException e) {
+      } catch (final MessagingException e) {
         logger.error("Email message send failed", e);
       }
     }
   }
 
-  public static List<String> findFailedJobs(ExecutableFlow flow) {
-    ArrayList<String> failedJobs = new ArrayList<String>();
-    for (ExecutableNode node : flow.getExecutableNodes()) {
-      if (node.getStatus() == Status.FAILED) {
-        failedJobs.add(node.getId());
-      }
-    }
-    return failedJobs;
-  }
-
   @Override
-  public void alertOnSuccess(ExecutableFlow exflow) throws Exception {
+  public void alertOnSuccess(final ExecutableFlow exflow) throws Exception {
     sendSuccessEmail(exflow);
   }
 
   @Override
-  public void alertOnError(ExecutableFlow exflow, String... extraReasons)
+  public void alertOnError(final ExecutableFlow exflow, final String... extraReasons)
       throws Exception {
     sendErrorEmail(exflow, extraReasons);
   }
 
   @Override
-  public void alertOnFirstError(ExecutableFlow exflow) throws Exception {
+  public void alertOnFirstError(final ExecutableFlow exflow) throws Exception {
     sendFirstErrorMessage(exflow);
   }
 
   @Override
-  public void alertOnSla(SlaOption slaOption, String slaMessage)
+  public void alertOnSla(final SlaOption slaOption, final String slaMessage)
       throws Exception {
     sendSlaAlertEmail(slaOption, slaMessage);
   }
diff --git a/azkaban-common/src/main/java/azkaban/utils/EmailMessage.java b/azkaban-common/src/main/java/azkaban/utils/EmailMessage.java
index b3b1aa5..ef54182 100644
--- a/azkaban-common/src/main/java/azkaban/utils/EmailMessage.java
+++ b/azkaban-common/src/main/java/azkaban/utils/EmailMessage.java
@@ -17,8 +17,14 @@
 package azkaban.utils;
 
 import com.sun.mail.smtp.SMTPTransport;
-import org.apache.log4j.Logger;
-
+import java.io.File;
+import java.io.InputStream;
+import java.net.SocketTimeoutException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.Properties;
 import javax.activation.DataHandler;
 import javax.activation.DataSource;
 import javax.activation.FileDataSource;
@@ -30,18 +36,20 @@ import javax.mail.internet.InternetAddress;
 import javax.mail.internet.MimeBodyPart;
 import javax.mail.internet.MimeMessage;
 import javax.mail.internet.MimeMultipart;
-import java.io.File;
-import java.io.InputStream;
-import java.net.SocketTimeoutException;
-import java.util.*;
+import org.apache.log4j.Logger;
 
 public class EmailMessage {
-  private final Logger logger = Logger.getLogger(EmailMessage.class);
 
-  private static String protocol = "smtp";
-  private List<String> _toAddress = new ArrayList<String>();
+  private static final String protocol = "smtp";
+  private static int _mailTimeout = 10000;
+  private static int _connectionTimeout = 10000;
+  private static long _totalAttachmentMaxSizeInByte = 1024 * 1024 * 1024; // 1
+  private final Logger logger = Logger.getLogger(EmailMessage.class);
+  private final List<String> _toAddress = new ArrayList<>();
+  private final int _mailPort;
+  // GB
+  private final ArrayList<BodyPart> _attachments = new ArrayList<>();
   private String _mailHost;
-  private int _mailPort;
   private String _mailUser;
   private String _mailPassword;
   private String _subject;
@@ -52,33 +60,27 @@ public class EmailMessage {
   private boolean _usesAuth = true;
   private boolean _enableAttachementEmbedment = true;
   private StringBuffer _body = new StringBuffer();
-  private static int _mailTimeout = 10000;
-  private static int _connectionTimeout = 10000;
-  private static long _totalAttachmentMaxSizeInByte = 1024 * 1024 * 1024; // 1
-                                                                          // GB
-
-  private ArrayList<BodyPart> _attachments = new ArrayList<BodyPart>();
 
   public EmailMessage() {
     this("localhost", AbstractMailer.DEFAULT_SMTP_PORT, "", "");
   }
 
-  public EmailMessage(String host, int port, String user, String password) {
-    _mailUser = user;
-    _mailHost = host;
-    _mailPort = port;
-    _mailPassword = password;
+  public EmailMessage(final String host, final int port, final String user, final String password) {
+    this._mailUser = user;
+    this._mailHost = host;
+    this._mailPort = port;
+    this._mailPassword = password;
   }
 
-  public static void setTimeout(int timeoutMillis) {
+  public static void setTimeout(final int timeoutMillis) {
     _mailTimeout = timeoutMillis;
   }
 
-  public static void setConnectionTimeout(int timeoutMillis) {
+  public static void setConnectionTimeout(final int timeoutMillis) {
     _connectionTimeout = timeoutMillis;
   }
 
-  public static void setTotalAttachmentMaxSize(long sizeInBytes) {
+  public static void setTotalAttachmentMaxSize(final long sizeInBytes) {
     if (sizeInBytes < 1) {
       throw new IllegalArgumentException(
           "attachment max size can't be 0 or negative");
@@ -86,170 +88,166 @@ public class EmailMessage {
     _totalAttachmentMaxSizeInByte = sizeInBytes;
   }
 
-  public EmailMessage setMailHost(String host) {
-    _mailHost = host;
+  public EmailMessage setMailHost(final String host) {
+    this._mailHost = host;
     return this;
   }
 
-  public EmailMessage setMailUser(String user) {
-    _mailUser = user;
+  public EmailMessage setMailUser(final String user) {
+    this._mailUser = user;
     return this;
   }
 
-  public EmailMessage enableAttachementEmbedment(boolean toEnable) {
-    _enableAttachementEmbedment = toEnable;
+  public EmailMessage enableAttachementEmbedment(final boolean toEnable) {
+    this._enableAttachementEmbedment = toEnable;
     return this;
   }
 
-  public EmailMessage setMailPassword(String password) {
-    _mailPassword = password;
+  public EmailMessage setMailPassword(final String password) {
+    this._mailPassword = password;
     return this;
   }
 
-  public EmailMessage addAllToAddress(Collection<? extends String> addresses) {
-    _toAddress.addAll(addresses);
+  public EmailMessage addAllToAddress(final Collection<? extends String> addresses) {
+    this._toAddress.addAll(addresses);
     return this;
   }
 
-  public EmailMessage addToAddress(String address) {
-    _toAddress.add(address);
+  public EmailMessage addToAddress(final String address) {
+    this._toAddress.add(address);
     return this;
   }
 
-  public EmailMessage setSubject(String subject) {
-    _subject = subject;
+  public EmailMessage setFromAddress(final String fromAddress) {
+    this._fromAddress = fromAddress;
     return this;
   }
 
-  public EmailMessage setFromAddress(String fromAddress) {
-    _fromAddress = fromAddress;
+  public EmailMessage setTLS(final String tls) {
+    this._tls = tls;
     return this;
   }
 
-  public EmailMessage setTLS(String tls) {
-    _tls = tls;
+  public EmailMessage setAuth(final boolean auth) {
+    this._usesAuth = auth;
     return this;
   }
 
-  public EmailMessage setAuth(boolean auth) {
-    _usesAuth = auth;
-    return this;
-  }
-
-  public EmailMessage addAttachment(File file) throws MessagingException {
+  public EmailMessage addAttachment(final File file) throws MessagingException {
     return addAttachment(file.getName(), file);
   }
 
-  public EmailMessage addAttachment(String attachmentName, File file)
+  public EmailMessage addAttachment(final String attachmentName, final File file)
       throws MessagingException {
 
-    _totalAttachmentSizeSoFar += file.length();
+    this._totalAttachmentSizeSoFar += file.length();
 
-    if (_totalAttachmentSizeSoFar > _totalAttachmentMaxSizeInByte) {
+    if (this._totalAttachmentSizeSoFar > _totalAttachmentMaxSizeInByte) {
       throw new MessageAttachmentExceededMaximumSizeException(
           "Adding attachment '" + attachmentName
               + "' will exceed the allowed maximum size of "
               + _totalAttachmentMaxSizeInByte);
     }
 
-    BodyPart attachmentPart = new MimeBodyPart();
-    DataSource fileDataSource = new FileDataSource(file);
+    final BodyPart attachmentPart = new MimeBodyPart();
+    final DataSource fileDataSource = new FileDataSource(file);
     attachmentPart.setDataHandler(new DataHandler(fileDataSource));
     attachmentPart.setFileName(attachmentName);
-    _attachments.add(attachmentPart);
+    this._attachments.add(attachmentPart);
     return this;
   }
 
-  public EmailMessage addAttachment(String attachmentName, InputStream stream)
+  public EmailMessage addAttachment(final String attachmentName, final InputStream stream)
       throws MessagingException {
-    BodyPart attachmentPart = new MimeBodyPart(stream);
+    final BodyPart attachmentPart = new MimeBodyPart(stream);
     attachmentPart.setFileName(attachmentName);
-    _attachments.add(attachmentPart);
+    this._attachments.add(attachmentPart);
     return this;
   }
 
   private void checkSettings() {
-    if (_mailHost == null) {
+    if (this._mailHost == null) {
       throw new RuntimeException("Mail host not set.");
     }
 
-    if (_fromAddress == null || _fromAddress.length() == 0) {
+    if (this._fromAddress == null || this._fromAddress.length() == 0) {
       throw new RuntimeException("From address not set.");
     }
 
-    if (_subject == null) {
+    if (this._subject == null) {
       throw new RuntimeException("Subject cannot be null");
     }
 
-    if (_toAddress.size() == 0) {
+    if (this._toAddress.size() == 0) {
       throw new RuntimeException("T");
     }
   }
 
   public void sendEmail() throws MessagingException {
     checkSettings();
-    Properties props = new Properties();
-    if (_usesAuth) {
+    final Properties props = new Properties();
+    if (this._usesAuth) {
       props.put("mail." + protocol + ".auth", "true");
-      props.put("mail.user", _mailUser);
-      props.put("mail.password", _mailPassword);
+      props.put("mail.user", this._mailUser);
+      props.put("mail.password", this._mailPassword);
     } else {
       props.put("mail." + protocol + ".auth", "false");
     }
-    props.put("mail." + protocol + ".host", _mailHost);
-    props.put("mail." + protocol + ".port", _mailPort);
+    props.put("mail." + protocol + ".host", this._mailHost);
+    props.put("mail." + protocol + ".port", this._mailPort);
     props.put("mail." + protocol + ".timeout", _mailTimeout);
     props.put("mail." + protocol + ".connectiontimeout", _connectionTimeout);
-    props.put("mail.smtp.starttls.enable", _tls);
-    props.put("mail.smtp.ssl.trust", _mailHost);
+    props.put("mail.smtp.starttls.enable", this._tls);
+    props.put("mail.smtp.ssl.trust", this._mailHost);
 
-    Session session = Session.getInstance(props, null);
-    Message message = new MimeMessage(session);
-    InternetAddress from = new InternetAddress(_fromAddress, false);
+    final Session session = Session.getInstance(props, null);
+    final Message message = new MimeMessage(session);
+    final InternetAddress from = new InternetAddress(this._fromAddress, false);
     message.setFrom(from);
-    for (String toAddr : _toAddress)
+    for (final String toAddr : this._toAddress) {
       message.addRecipient(Message.RecipientType.TO, new InternetAddress(
           toAddr, false));
-    message.setSubject(_subject);
+    }
+    message.setSubject(this._subject);
     message.setSentDate(new Date());
 
-    if (_attachments.size() > 0) {
-      MimeMultipart multipart =
+    if (this._attachments.size() > 0) {
+      final MimeMultipart multipart =
           this._enableAttachementEmbedment ? new MimeMultipart("related")
               : new MimeMultipart();
 
-      BodyPart messageBodyPart = new MimeBodyPart();
-      messageBodyPart.setContent(_body.toString(), _mimeType);
+      final BodyPart messageBodyPart = new MimeBodyPart();
+      messageBodyPart.setContent(this._body.toString(), this._mimeType);
       multipart.addBodyPart(messageBodyPart);
 
       // Add attachments
-      for (BodyPart part : _attachments) {
+      for (final BodyPart part : this._attachments) {
         multipart.addBodyPart(part);
       }
 
       message.setContent(multipart);
     } else {
-      message.setContent(_body.toString(), _mimeType);
+      message.setContent(this._body.toString(), this._mimeType);
     }
 
     // Transport transport = session.getTransport();
 
-    SMTPTransport t = (SMTPTransport) session.getTransport(protocol);
+    final SMTPTransport t = (SMTPTransport) session.getTransport(protocol);
 
     try {
       connectToSMTPServer(t);
-    } catch (MessagingException ste) {
+    } catch (final MessagingException ste) {
       if (ste.getCause() instanceof SocketTimeoutException) {
         try {
           // retry on SocketTimeoutException
           connectToSMTPServer(t);
-          logger.info("Email retry on SocketTimeoutException succeeded");
-        } catch (MessagingException me) {
-          logger.error("Email retry on SocketTimeoutException failed", me);
+          this.logger.info("Email retry on SocketTimeoutException succeeded");
+        } catch (final MessagingException me) {
+          this.logger.error("Email retry on SocketTimeoutException failed", me);
           throw me;
         }
       } else {
-        logger.error("Encountered issue while connecting to email server", ste);
+        this.logger.error("Encountered issue while connecting to email server", ste);
         throw ste;
       }
     }
@@ -257,44 +255,49 @@ public class EmailMessage {
     t.close();
   }
 
-  private void connectToSMTPServer(SMTPTransport t) throws MessagingException {
-    if (_usesAuth) {
-      t.connect(_mailHost, _mailPort, _mailUser, _mailPassword);
+  private void connectToSMTPServer(final SMTPTransport t) throws MessagingException {
+    if (this._usesAuth) {
+      t.connect(this._mailHost, this._mailPort, this._mailUser, this._mailPassword);
     } else {
       t.connect();
     }
   }
 
-  public void setBody(String body) {
-    setBody(body, _mimeType);
+  public void setBody(final String body, final String mimeType) {
+    this._body = new StringBuffer(body);
+    this._mimeType = mimeType;
   }
 
-  public void setBody(String body, String mimeType) {
-    _body = new StringBuffer(body);
-    _mimeType = mimeType;
-  }
-
-  public EmailMessage setMimeType(String mimeType) {
-    _mimeType = mimeType;
+  public EmailMessage setMimeType(final String mimeType) {
+    this._mimeType = mimeType;
     return this;
   }
 
-  public EmailMessage println(Object str) {
-    _body.append(str);
+  public EmailMessage println(final Object str) {
+    this._body.append(str);
 
     return this;
   }
 
   public String getBody() {
-    return _body.toString();
+    return this._body.toString();
+  }
+
+  public void setBody(final String body) {
+    setBody(body, this._mimeType);
   }
 
   public String getSubject() {
-    return _subject;
+    return this._subject;
+  }
+
+  public EmailMessage setSubject(final String subject) {
+    this._subject = subject;
+    return this;
   }
 
-  public int getMailPort(){
-    return _mailPort;
+  public int getMailPort() {
+    return this._mailPort;
   }
 
 }
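
For readers unfamiliar with the class, EmailMessage is a fluent builder: configure the SMTP connection, add recipients and a body, then call sendEmail(). The sketch below is illustrative only — the host, port, credentials and addresses are placeholders, and the azkaban.utils package is assumed from the surrounding file paths; the constructor and setters themselves match the signatures shown in the diff above.

    import azkaban.utils.EmailMessage;
    import javax.mail.MessagingException;

    public class EmailMessageExample {
      public static void main(String[] args) throws MessagingException {
        // Placeholder SMTP settings; EmailMessage(host, port, user, password)
        // is the constructor shown above.
        EmailMessage message = new EmailMessage("smtp.example.com", 25, "azkaban", "secret");
        message.setFromAddress("azkaban@example.com")
            .addToAddress("oncall@example.com")
            .setSubject("Flow failed")
            .setAuth(true)
            .setTLS("true");
        message.setBody("Flow 'daily-report' failed on executor 1.", "text/plain");
        // sendEmail() validates host/from/subject/recipients via checkSettings()
        // and retries once when the connect fails with a SocketTimeoutException.
        message.sendEmail();
      }
    }
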
diff --git a/azkaban-common/src/main/java/azkaban/utils/ExternalLinkUtils.java b/azkaban-common/src/main/java/azkaban/utils/ExternalLinkUtils.java
index 4d9db49..e2692d3 100644
--- a/azkaban-common/src/main/java/azkaban/utils/ExternalLinkUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/ExternalLinkUtils.java
@@ -17,52 +17,59 @@
 package azkaban.utils;
 
 import azkaban.Constants;
-
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
-
 import javax.servlet.http.HttpServletRequest;
-
 import org.apache.log4j.Logger;
 
 public class ExternalLinkUtils {
+
   private static final Logger logger = Logger.getLogger(ExternalLinkUtils.class);
 
-  public static String getExternalAnalyzerOnReq(Props azkProps, HttpServletRequest req) {
+  public static String getExternalAnalyzerOnReq(final Props azkProps,
+      final HttpServletRequest req) {
     // If no topic was configured to be an external analyzer, return empty
     if (!azkProps.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_TOPIC)) {
       return "";
     }
     // Find out which external link we should use to lead to our analyzer
-    String topic = azkProps.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_TOPIC);
+    final String topic = azkProps
+        .getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_TOPIC);
     return getLinkFromRequest(topic, azkProps, req);
   }
 
-  public static String getExternalLogViewer(Props azkProps, String jobId, Props jobProps) {
+  public static String getExternalLogViewer(final Props azkProps, final String jobId,
+      final Props jobProps) {
     // If no topic was configured to be an external analyzer, return empty
-    if (!azkProps.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC)) {
+    if (!azkProps
+        .containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC)) {
       return "";
     }
     // Find out which external link we should use to lead to our log viewer
-    String topic = azkProps.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC);
+    final String topic = azkProps
+        .getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC);
     return getLinkFromJobAndExecId(topic, azkProps, jobId, jobProps);
   }
 
-  private static String getLinkFromJobAndExecId(String topic, Props azkProps, String jobId, Props jobProps) {
+  private static String getLinkFromJobAndExecId(final String topic, final Props azkProps,
+      final String jobId,
+      final Props jobProps) {
     String urlTemplate = getURLForTopic(topic, azkProps);
     if (urlTemplate.isEmpty()) {
       logger.error("No URL specified for topic " + topic);
       return "";
     }
-    String job = encodeToUTF8(jobId);
-    String execid = encodeToUTF8(jobProps.getString(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID));
+    final String job = encodeToUTF8(jobId);
+    final String execid = encodeToUTF8(
+        jobProps.getString(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID));
 
     urlTemplate = urlTemplate.replace("${jobid}", job).replace("${execid}", execid);
     logger.info("Creating link: " + urlTemplate);
     return urlTemplate;
   }
 
-  private static String getLinkFromRequest(String topic, Props azkProps, HttpServletRequest req) {
+  private static String getLinkFromRequest(final String topic, final Props azkProps,
+      final HttpServletRequest req) {
     String urlTemplate = getURLForTopic(topic, azkProps);
     if (urlTemplate.isEmpty()) {
       logger.error("No URL specified for topic " + topic);
@@ -79,14 +86,16 @@ public class ExternalLinkUtils {
     return urlTemplate;
   }
 
-  static String getURLForTopic(String topic, Props azkProps) {
-    return azkProps.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL.replace("${topic}", topic), "");
+  static String getURLForTopic(final String topic, final Props azkProps) {
+    return azkProps.getString(
+        Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL.replace("${topic}", topic),
+        "");
   }
 
-  static String encodeToUTF8(String url) {
+  static String encodeToUTF8(final String url) {
     try {
       return URLEncoder.encode(url, "UTF-8").replaceAll("\\+", "%20");
-    } catch (UnsupportedEncodingException e) {
+    } catch (final UnsupportedEncodingException e) {
       logger.error("Specified encoding is not supported", e);
     }
     return "";
diff --git a/azkaban-common/src/main/java/azkaban/utils/FileIOUtils.java b/azkaban-common/src/main/java/azkaban/utils/FileIOUtils.java
index ecb9463..c8ae211 100644
--- a/azkaban-common/src/main/java/azkaban/utils/FileIOUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/FileIOUtils.java
@@ -30,7 +30,6 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.StringTokenizer;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.log4j.Logger;
 
@@ -40,6 +39,7 @@ import org.apache.log4j.Logger;
  * future.
  */
 public class FileIOUtils {
+
   private final static Logger logger = Logger.getLogger(FileIOUtils.class);
 
   /**
@@ -48,7 +48,7 @@ public class FileIOUtils {
    * @param dir directory file object
    * @return true if it is writable. false, otherwise
    */
-  public static boolean isDirWritable(File dir) {
+  public static boolean isDirWritable(final File dir) {
     File testFile = null;
     try {
       testFile = new File(dir, "_tmp");
@@ -57,7 +57,7 @@ public class FileIOUtils {
        * there is a safer way for this check.
        */
       testFile.createNewFile();
-    } catch (IOException e) {
+    } catch (final IOException e) {
       return false;
     } finally {
       if (testFile != null) {
@@ -67,39 +67,14 @@ public class FileIOUtils {
     return true;
   }
 
-  public static class PrefixSuffixFileFilter implements FileFilter {
-    private String prefix;
-    private String suffix;
-
-    public PrefixSuffixFileFilter(String prefix, String suffix) {
-      this.prefix = prefix;
-      this.suffix = suffix;
-    }
-
-    @Override
-    public boolean accept(File pathname) {
-      if (!pathname.isFile() || pathname.isHidden()) {
-        return false;
-      }
-
-      String name = pathname.getName();
-      int length = name.length();
-      if (suffix.length() > length || prefix.length() > length) {
-        return false;
-      }
-
-      return name.startsWith(prefix) && name.endsWith(suffix);
-    }
-  }
-
-  public static String getSourcePathFromClass(Class<?> containedClass) {
+  public static String getSourcePathFromClass(final Class<?> containedClass) {
     File file =
         new File(containedClass.getProtectionDomain().getCodeSource()
             .getLocation().getPath());
 
     if (!file.isDirectory() && file.getName().endsWith(".class")) {
-      String name = containedClass.getName();
-      StringTokenizer tokenizer = new StringTokenizer(name, ".");
+      final String name = containedClass.getName();
+      final StringTokenizer tokenizer = new StringTokenizer(name, ".");
       while (tokenizer.hasMoreTokens()) {
         tokenizer.nextElement();
         file = file.getParentFile();
@@ -115,7 +90,7 @@ public class FileIOUtils {
    * Run a unix command that will hard link files and recurse into directories.
    */
 
-  public static void createDeepHardlink(File sourceDir, File destDir)
+  public static void createDeepHardlink(final File sourceDir, final File destDir)
       throws IOException {
     if (!sourceDir.exists()) {
       throw new IOException("Source directory " + sourceDir.getPath()
@@ -127,12 +102,12 @@ public class FileIOUtils {
       throw new IOException("Source or Destination is not a directory.");
     }
 
-    Set<String> paths = new HashSet<String>();
+    final Set<String> paths = new HashSet<>();
     createDirsFindFiles(sourceDir, sourceDir, destDir, paths);
 
-    StringBuffer buffer = new StringBuffer();
+    final StringBuffer buffer = new StringBuffer();
     for (String path : paths) {
-      File sourceLink = new File(sourceDir, path);
+      final File sourceLink = new File(sourceDir, path);
       path = "." + path;
 
       buffer.append("ln ").append(sourceLink.getAbsolutePath()).append("/*")
@@ -142,16 +117,16 @@ public class FileIOUtils {
     runShellCommand(buffer.toString(), destDir);
   }
 
-  private static void runShellCommand(String command, File workingDir)
+  private static void runShellCommand(final String command, final File workingDir)
       throws IOException {
-    ProcessBuilder builder = new ProcessBuilder().command("sh", "-c", command);
+    final ProcessBuilder builder = new ProcessBuilder().command("sh", "-c", command);
     builder.directory(workingDir);
 
     // XXX what about stopping threads ??
-    Process process = builder.start();
+    final Process process = builder.start();
     try {
-      NullLogger errorLogger = new NullLogger(process.getErrorStream());
-      NullLogger inputLogger = new NullLogger(process.getInputStream());
+      final NullLogger errorLogger = new NullLogger(process.getErrorStream());
+      final NullLogger inputLogger = new NullLogger(process.getInputStream());
       errorLogger.start();
       inputLogger.start();
 
@@ -166,7 +141,7 @@ public class FileIOUtils {
 
           throw new IOException(errorMessage);
         }
-      } catch (InterruptedException e) {
+      } catch (final InterruptedException e) {
         logger.error(e);
       }
     } finally {
@@ -177,69 +152,35 @@ public class FileIOUtils {
 
   }
 
-  private static void createDirsFindFiles(File baseDir, File sourceDir,
-      File destDir, Set<String> paths) {
-    File[] srcList = sourceDir.listFiles();
-    String path = getRelativePath(baseDir, sourceDir);
+  private static void createDirsFindFiles(final File baseDir, final File sourceDir,
+      final File destDir, final Set<String> paths) {
+    final File[] srcList = sourceDir.listFiles();
+    final String path = getRelativePath(baseDir, sourceDir);
     paths.add(path);
 
-    for (File file : srcList) {
+    for (final File file : srcList) {
       if (file.isDirectory()) {
-        File newDestDir = new File(destDir, file.getName());
+        final File newDestDir = new File(destDir, file.getName());
         newDestDir.mkdirs();
         createDirsFindFiles(baseDir, file, newDestDir, paths);
       }
     }
   }
 
-  private static String getRelativePath(File basePath, File sourceDir) {
+  private static String getRelativePath(final File basePath, final File sourceDir) {
     return sourceDir.getPath().substring(basePath.getPath().length());
   }
 
-  private static class NullLogger extends Thread {
-    private final BufferedReader inputReader;
-    private CircularBuffer<String> buffer = new CircularBuffer<String>(5);
+  public static Pair<Integer, Integer> readUtf8File(final File file, final int offset,
+      final int length, final OutputStream stream) throws IOException {
+    final byte[] buffer = new byte[length];
 
-    public NullLogger(InputStream stream) {
-      inputReader = new BufferedReader(new InputStreamReader(stream));
-    }
+    final FileInputStream fileStream = new FileInputStream(file);
 
-    @Override
-    public void run() {
-      try {
-        while (!Thread.currentThread().isInterrupted()) {
-          String line = inputReader.readLine();
-          if (line == null) {
-            return;
-          }
-          buffer.append(line);
-        }
-      } catch (IOException e) {
-        e.printStackTrace();
-      }
-    }
-
-    public String getLastMessages() {
-      StringBuffer messageBuffer = new StringBuffer();
-      for (String message : buffer) {
-        messageBuffer.append(message);
-        messageBuffer.append("\n");
-      }
-
-      return messageBuffer.toString();
-    }
-  }
-
-  public static Pair<Integer, Integer> readUtf8File(File file, int offset,
-      int length, OutputStream stream) throws IOException {
-    byte[] buffer = new byte[length];
-
-    FileInputStream fileStream = new FileInputStream(file);
-
-    long skipped = fileStream.skip(offset);
+    final long skipped = fileStream.skip(offset);
     if (skipped < offset) {
       fileStream.close();
-      return new Pair<Integer, Integer>(0, 0);
+      return new Pair<>(0, 0);
     }
 
     BufferedInputStream inputStream = null;
@@ -250,19 +191,19 @@ public class FileIOUtils {
       IOUtils.closeQuietly(inputStream);
     }
 
-    Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, length);
+    final Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, length);
     stream.write(buffer, utf8Range.getFirst(), utf8Range.getSecond());
 
-    return new Pair<Integer, Integer>(offset + utf8Range.getFirst(),
+    return new Pair<>(offset + utf8Range.getFirst(),
         utf8Range.getSecond());
   }
 
-  public static LogData readUtf8File(File file, int fileOffset, int length)
+  public static LogData readUtf8File(final File file, final int fileOffset, final int length)
       throws IOException {
-    byte[] buffer = new byte[length];
-    FileInputStream fileStream = new FileInputStream(file);
+    final byte[] buffer = new byte[length];
+    final FileInputStream fileStream = new FileInputStream(file);
 
-    long skipped = fileStream.skip(fileOffset);
+    final long skipped = fileStream.skip(fileOffset);
     if (skipped < fileOffset) {
       fileStream.close();
       return new LogData(fileOffset, 0, "");
@@ -280,20 +221,20 @@ public class FileIOUtils {
     if (read <= 0) {
       return new LogData(fileOffset, 0, "");
     }
-    Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
-    String outputString =
+    final Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
+    final String outputString =
         new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
 
     return new LogData(fileOffset + utf8Range.getFirst(),
         utf8Range.getSecond(), outputString);
   }
 
-  public static JobMetaData readUtf8MetaDataFile(File file, int fileOffset,
-      int length) throws IOException {
-    byte[] buffer = new byte[length];
-    FileInputStream fileStream = new FileInputStream(file);
+  public static JobMetaData readUtf8MetaDataFile(final File file, final int fileOffset,
+      final int length) throws IOException {
+    final byte[] buffer = new byte[length];
+    final FileInputStream fileStream = new FileInputStream(file);
 
-    long skipped = fileStream.skip(fileOffset);
+    final long skipped = fileStream.skip(fileOffset);
     if (skipped < fileOffset) {
       fileStream.close();
       return new JobMetaData(fileOffset, 0, "");
@@ -311,8 +252,8 @@ public class FileIOUtils {
     if (read <= 0) {
       return new JobMetaData(fileOffset, 0, "");
     }
-    Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
-    String outputString =
+    final Pair<Integer, Integer> utf8Range = getUtf8Range(buffer, 0, read);
+    final String outputString =
         new String(buffer, utf8Range.getFirst(), utf8Range.getSecond());
 
     return new JobMetaData(fileOffset + utf8Range.getFirst(),
@@ -322,18 +263,18 @@ public class FileIOUtils {
   /**
    * Returns first and length.
    */
-  public static Pair<Integer, Integer> getUtf8Range(byte[] buffer, int offset,
-      int length) {
-    int start = getUtf8ByteStart(buffer, offset);
-    int end = getUtf8ByteEnd(buffer, offset + length - 1);
+  public static Pair<Integer, Integer> getUtf8Range(final byte[] buffer, final int offset,
+      final int length) {
+    final int start = getUtf8ByteStart(buffer, offset);
+    final int end = getUtf8ByteEnd(buffer, offset + length - 1);
 
-    return new Pair<Integer, Integer>(start, end - start + 1);
+    return new Pair<>(start, end - start + 1);
   }
 
-  private static int getUtf8ByteStart(byte[] buffer, int offset) {
+  private static int getUtf8ByteStart(final byte[] buffer, final int offset) {
     // If it's a proper utf-8, we should find it within the next 6 bytes.
     for (int i = offset; i < offset + 6 && i < buffer.length; i++) {
-      byte b = buffer[i];
+      final byte b = buffer[i];
       // check the mask 0x80 is 0, which is a proper ascii
       if ((0x80 & b) == 0) {
         return i;
@@ -346,10 +287,10 @@ public class FileIOUtils {
     return offset;
   }
 
-  private static int getUtf8ByteEnd(byte[] buffer, int offset) {
+  private static int getUtf8ByteEnd(final byte[] buffer, final int offset) {
     // If it's a proper utf-8, we should find it within the previous 12 bytes.
     for (int i = offset; i > offset - 11 && i >= 0; i--) {
-      byte b = buffer[i];
+      final byte b = buffer[i];
       // check the mask 0x80 is 0, which is a proper ascii. Just return
       if ((0x80 & b) == 0) {
         return i;
@@ -387,96 +328,159 @@ public class FileIOUtils {
     return offset;
   }
 
+  public static class PrefixSuffixFileFilter implements FileFilter {
+
+    private final String prefix;
+    private final String suffix;
+
+    public PrefixSuffixFileFilter(final String prefix, final String suffix) {
+      this.prefix = prefix;
+      this.suffix = suffix;
+    }
+
+    @Override
+    public boolean accept(final File pathname) {
+      if (!pathname.isFile() || pathname.isHidden()) {
+        return false;
+      }
+
+      final String name = pathname.getName();
+      final int length = name.length();
+      if (this.suffix.length() > length || this.prefix.length() > length) {
+        return false;
+      }
+
+      return name.startsWith(this.prefix) && name.endsWith(this.suffix);
+    }
+  }
+
+  private static class NullLogger extends Thread {
+
+    private final BufferedReader inputReader;
+    private final CircularBuffer<String> buffer = new CircularBuffer<>(5);
+
+    public NullLogger(final InputStream stream) {
+      this.inputReader = new BufferedReader(new InputStreamReader(stream));
+    }
+
+    @Override
+    public void run() {
+      try {
+        while (!Thread.currentThread().isInterrupted()) {
+          final String line = this.inputReader.readLine();
+          if (line == null) {
+            return;
+          }
+          this.buffer.append(line);
+        }
+      } catch (final IOException e) {
+        e.printStackTrace();
+      }
+    }
+
+    public String getLastMessages() {
+      final StringBuffer messageBuffer = new StringBuffer();
+      for (final String message : this.buffer) {
+        messageBuffer.append(message);
+        messageBuffer.append("\n");
+      }
+
+      return messageBuffer.toString();
+    }
+  }
+
   public static class LogData {
-    private int offset;
-    private int length;
-    private String data;
 
-    public LogData(int offset, int length, String data) {
+    private final int offset;
+    private final int length;
+    private final String data;
+
+    public LogData(final int offset, final int length, final String data) {
       this.offset = offset;
       this.length = length;
       this.data = data;
     }
 
+    public static LogData createLogDataFromObject(final Map<String, Object> map) {
+      final int offset = (Integer) map.get("offset");
+      final int length = (Integer) map.get("length");
+      final String data = (String) map.get("data");
+
+      return new LogData(offset, length, data);
+    }
+
     public int getOffset() {
-      return offset;
+      return this.offset;
     }
 
     public int getLength() {
-      return length;
+      return this.length;
     }
 
     public String getData() {
-      return data;
+      return this.data;
     }
 
     public Map<String, Object> toObject() {
-      HashMap<String, Object> map = new HashMap<String, Object>();
-      map.put("offset", offset);
-      map.put("length", length);
-      map.put("data", data);
+      final HashMap<String, Object> map = new HashMap<>();
+      map.put("offset", this.offset);
+      map.put("length", this.length);
+      map.put("data", this.data);
 
       return map;
     }
 
-    public static LogData createLogDataFromObject(Map<String, Object> map) {
-      int offset = (Integer) map.get("offset");
-      int length = (Integer) map.get("length");
-      String data = (String) map.get("data");
-
-      return new LogData(offset, length, data);
-    }
-
     @Override
     public String toString() {
-      return "[offset=" + offset + ",length=" + length + ",data=" + data + "]";
+      return "[offset=" + this.offset + ",length=" + this.length + ",data=" + this.data + "]";
     }
   }
 
   public static class JobMetaData {
-    private int offset;
-    private int length;
-    private String data;
 
-    public JobMetaData(int offset, int length, String data) {
+    private final int offset;
+    private final int length;
+    private final String data;
+
+    public JobMetaData(final int offset, final int length, final String data) {
       this.offset = offset;
       this.length = length;
       this.data = data;
     }
 
+    public static JobMetaData createJobMetaDataFromObject(
+        final Map<String, Object> map) {
+      final int offset = (Integer) map.get("offset");
+      final int length = (Integer) map.get("length");
+      final String data = (String) map.get("data");
+
+      return new JobMetaData(offset, length, data);
+    }
+
     public int getOffset() {
-      return offset;
+      return this.offset;
     }
 
     public int getLength() {
-      return length;
+      return this.length;
     }
 
     public String getData() {
-      return data;
+      return this.data;
     }
 
     public Map<String, Object> toObject() {
-      HashMap<String, Object> map = new HashMap<String, Object>();
-      map.put("offset", offset);
-      map.put("length", length);
-      map.put("data", data);
+      final HashMap<String, Object> map = new HashMap<>();
+      map.put("offset", this.offset);
+      map.put("length", this.length);
+      map.put("data", this.data);
 
       return map;
     }
 
-    public static JobMetaData createJobMetaDataFromObject(
-        Map<String, Object> map) {
-      int offset = (Integer) map.get("offset");
-      int length = (Integer) map.get("length");
-      String data = (String) map.get("data");
-
-      return new JobMetaData(offset, length, data);
-    }
-
     @Override
     public String toString() {
-      return "[offset=" + offset + ",length=" + length + ",data=" + data + "]";
+      return "[offset=" + this.offset + ",length=" + this.length + ",data=" + this.data + "]";
     }
   }
 }
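
As a usage note for the log-reading helpers above: readUtf8File() reads a fixed byte window and then trims it with getUtf8Range() so the returned text never starts or ends in the middle of a multi-byte UTF-8 character. A minimal sketch, assuming an existing log file at a placeholder path:

    import azkaban.utils.FileIOUtils;
    import azkaban.utils.FileIOUtils.LogData;
    import java.io.File;
    import java.io.IOException;

    public class LogPagingExample {
      public static void main(String[] args) throws IOException {
        File logFile = new File("/tmp/azkaban-job.log"); // placeholder path

        // Read up to 8 KB starting at byte 0; offset and length in the result are
        // adjusted to whole UTF-8 characters by getUtf8Range().
        LogData page = FileIOUtils.readUtf8File(logFile, 0, 8192);
        System.out.println("next offset: " + (page.getOffset() + page.getLength()));
        System.out.println(page.getData());
      }
    }
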
diff --git a/azkaban-common/src/main/java/azkaban/utils/FlowUtils.java b/azkaban-common/src/main/java/azkaban/utils/FlowUtils.java
index 9268d2c..9f877eb 100644
--- a/azkaban-common/src/main/java/azkaban/utils/FlowUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/FlowUtils.java
@@ -24,33 +24,30 @@ import java.util.Map;
 
 
 public class FlowUtils {
+
   /**
    * Change job status to disabled in exflow if the job is in disabledJobs
-   * @param disabledJobs
-   * @param exflow
    */
-  public static void applyDisabledJobs(List<Object> disabledJobs,
-      ExecutableFlowBase exflow) {
-    for (Object disabled : disabledJobs) {
+  public static void applyDisabledJobs(final List<Object> disabledJobs,
+      final ExecutableFlowBase exflow) {
+    for (final Object disabled : disabledJobs) {
       if (disabled instanceof String) {
-        String nodeName = (String) disabled;
-        ExecutableNode node = exflow.getExecutableNode(nodeName);
+        final String nodeName = (String) disabled;
+        final ExecutableNode node = exflow.getExecutableNode(nodeName);
         if (node != null) {
           node.setStatus(Status.DISABLED);
         }
       } else if (disabled instanceof Map) {
-        @SuppressWarnings("unchecked")
-        Map<String, Object> nestedDisabled = (Map<String, Object>) disabled;
-        String nodeName = (String) nestedDisabled.get("id");
-        @SuppressWarnings("unchecked")
-        List<Object> subDisabledJobs =
+        final Map<String, Object> nestedDisabled = (Map<String, Object>) disabled;
+        final String nodeName = (String) nestedDisabled.get("id");
+        final List<Object> subDisabledJobs =
             (List<Object>) nestedDisabled.get("children");
 
         if (nodeName == null || subDisabledJobs == null) {
           return;
         }
 
-        ExecutableNode node = exflow.getExecutableNode(nodeName);
+        final ExecutableNode node = exflow.getExecutableNode(nodeName);
         if (node != null && node instanceof ExecutableFlowBase) {
           applyDisabledJobs(subDisabledJobs, (ExecutableFlowBase) node);
         }
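
applyDisabledJobs() accepts a heterogeneous list: a plain string disables a job by name, while a map with "id" and "children" recurses into an embedded flow. A sketch of what such a list looks like (job and flow names are made up):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class DisabledJobsExample {
      public static void main(String[] args) {
        // Nested entry: disable "cleanup" inside the embedded flow "subflow".
        Map<String, Object> nested = new HashMap<>();
        nested.put("id", "subflow");
        nested.put("children", Arrays.<Object>asList("cleanup"));

        // Mixed list in the shape consumed by FlowUtils.applyDisabledJobs():
        // plain job names plus nested {id, children} maps for embedded flows.
        List<Object> disabledJobs = Arrays.<Object>asList("ingest", nested);
        System.out.println(disabledJobs);
      }
    }
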
diff --git a/azkaban-common/src/main/java/azkaban/utils/GZIPUtils.java b/azkaban-common/src/main/java/azkaban/utils/GZIPUtils.java
index 4315a06..97f7472 100644
--- a/azkaban-common/src/main/java/azkaban/utils/GZIPUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/GZIPUtils.java
@@ -21,25 +21,24 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
-
 import org.apache.commons.io.IOUtils;
 
 public class GZIPUtils {
 
-  public static byte[] gzipString(String str, String encType)
+  public static byte[] gzipString(final String str, final String encType)
       throws IOException {
-    byte[] stringData = str.getBytes(encType);
+    final byte[] stringData = str.getBytes(encType);
 
     return gzipBytes(stringData);
   }
 
-  public static byte[] gzipBytes(byte[] bytes) throws IOException {
+  public static byte[] gzipBytes(final byte[] bytes) throws IOException {
     return gzipBytes(bytes, 0, bytes.length);
   }
 
-  public static byte[] gzipBytes(byte[] bytes, int offset, int length)
+  public static byte[] gzipBytes(final byte[] bytes, final int offset, final int length)
       throws IOException {
-    ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
+    final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
     GZIPOutputStream gzipStream = null;
 
     gzipStream = new GZIPOutputStream(byteOutputStream);
@@ -49,19 +48,19 @@ public class GZIPUtils {
     return byteOutputStream.toByteArray();
   }
 
-  public static byte[] unGzipBytes(byte[] bytes) throws IOException {
-    ByteArrayInputStream byteInputStream = new ByteArrayInputStream(bytes);
-    GZIPInputStream gzipInputStream = new GZIPInputStream(byteInputStream);
+  public static byte[] unGzipBytes(final byte[] bytes) throws IOException {
+    final ByteArrayInputStream byteInputStream = new ByteArrayInputStream(bytes);
+    final GZIPInputStream gzipInputStream = new GZIPInputStream(byteInputStream);
 
-    ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
+    final ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
     IOUtils.copy(gzipInputStream, byteOutputStream);
 
     return byteOutputStream.toByteArray();
   }
 
-  public static String unGzipString(byte[] bytes, String encType)
+  public static String unGzipString(final byte[] bytes, final String encType)
       throws IOException {
-    byte[] response = unGzipBytes(bytes);
+    final byte[] response = unGzipBytes(bytes);
     return new String(response, encType);
   }
 }
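
GZIPUtils is a symmetric in-memory wrapper: gzipString()/gzipBytes() compress into a byte array and unGzipBytes()/unGzipString() reverse it. A quick round-trip check (the payload string is arbitrary):

    import azkaban.utils.GZIPUtils;
    import java.io.IOException;

    public class GzipRoundTripExample {
      public static void main(String[] args) throws IOException {
        String original = "executor heartbeat payload";

        // Compress the UTF-8 bytes of the string...
        byte[] compressed = GZIPUtils.gzipString(original, "UTF-8");

        // ...and decompress back to the same text.
        String restored = GZIPUtils.unGzipString(compressed, "UTF-8");
        System.out.println(original.equals(restored)); // true
      }
    }
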
diff --git a/azkaban-common/src/main/java/azkaban/utils/JSONUtils.java b/azkaban-common/src/main/java/azkaban/utils/JSONUtils.java
index d6a0cf5..4ca362d 100644
--- a/azkaban-common/src/main/java/azkaban/utils/JSONUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/JSONUtils.java
@@ -27,7 +27,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-
 import org.codehaus.jackson.JsonFactory;
 import org.codehaus.jackson.JsonNode;
 import org.codehaus.jackson.JsonParser;
@@ -42,49 +41,50 @@ public class JSONUtils {
   private JSONUtils() {
   }
 
-  public static String toJSON(Object obj) {
+  public static String toJSON(final Object obj) {
     return toJSON(obj, false);
   }
 
-  public static String toJSON(Object obj, boolean prettyPrint) {
-    ObjectMapper mapper = new ObjectMapper();
+  public static String toJSON(final Object obj, final boolean prettyPrint) {
+    final ObjectMapper mapper = new ObjectMapper();
 
     try {
       if (prettyPrint) {
-        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+        final ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         return writer.writeValueAsString(obj);
       }
       return mapper.writeValueAsString(obj);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new RuntimeException(e);
     }
   }
 
-  public static void toJSON(Object obj, OutputStream stream) {
+  public static void toJSON(final Object obj, final OutputStream stream) {
     toJSON(obj, stream, false);
   }
 
-  public static void toJSON(Object obj, OutputStream stream, boolean prettyPrint) {
-    ObjectMapper mapper = new ObjectMapper();
+  public static void toJSON(final Object obj, final OutputStream stream,
+      final boolean prettyPrint) {
+    final ObjectMapper mapper = new ObjectMapper();
     try {
       if (prettyPrint) {
-        ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
+        final ObjectWriter writer = mapper.writerWithDefaultPrettyPrinter();
         writer.writeValue(stream, obj);
         return;
       }
       mapper.writeValue(stream, obj);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new RuntimeException(e);
     }
   }
 
-  public static void toJSON(Object obj, File file) throws IOException {
+  public static void toJSON(final Object obj, final File file) throws IOException {
     toJSON(obj, file, false);
   }
 
-  public static void toJSON(Object obj, File file, boolean prettyPrint)
+  public static void toJSON(final Object obj, final File file, final boolean prettyPrint)
       throws IOException {
-    BufferedOutputStream stream =
+    final BufferedOutputStream stream =
         new BufferedOutputStream(new FileOutputStream(file));
     try {
       toJSON(obj, stream, prettyPrint);
@@ -93,60 +93,60 @@ public class JSONUtils {
     }
   }
 
-  public static Object parseJSONFromStringQuiet(String json) {
+  public static Object parseJSONFromStringQuiet(final String json) {
     try {
       return parseJSONFromString(json);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       e.printStackTrace();
       return null;
     }
   }
 
-  public static Object parseJSONFromString(String json) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(json);
-    JsonNode node = mapper.readTree(parser);
+  public static Object parseJSONFromString(final String json) throws IOException {
+    final ObjectMapper mapper = new ObjectMapper();
+    final JsonFactory factory = new JsonFactory();
+    final JsonParser parser = factory.createJsonParser(json);
+    final JsonNode node = mapper.readTree(parser);
 
     return toObjectFromJSONNode(node);
   }
 
-  public static Object parseJSONFromFile(File file) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(file);
-    JsonNode node = mapper.readTree(parser);
+  public static Object parseJSONFromFile(final File file) throws IOException {
+    final ObjectMapper mapper = new ObjectMapper();
+    final JsonFactory factory = new JsonFactory();
+    final JsonParser parser = factory.createJsonParser(file);
+    final JsonNode node = mapper.readTree(parser);
 
     return toObjectFromJSONNode(node);
   }
 
-  public static Object parseJSONFromReader(Reader reader) throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    JsonFactory factory = new JsonFactory();
-    JsonParser parser = factory.createJsonParser(reader);
-    JsonNode node = mapper.readTree(parser);
+  public static Object parseJSONFromReader(final Reader reader) throws IOException {
+    final ObjectMapper mapper = new ObjectMapper();
+    final JsonFactory factory = new JsonFactory();
+    final JsonParser parser = factory.createJsonParser(reader);
+    final JsonNode node = mapper.readTree(parser);
 
     return toObjectFromJSONNode(node);
   }
 
-  private static Object toObjectFromJSONNode(JsonNode node) {
+  private static Object toObjectFromJSONNode(final JsonNode node) {
     if (node.isObject()) {
-      HashMap<String, Object> obj = new HashMap<String, Object>();
-      Iterator<String> iter = node.getFieldNames();
+      final HashMap<String, Object> obj = new HashMap<>();
+      final Iterator<String> iter = node.getFieldNames();
       while (iter.hasNext()) {
-        String fieldName = iter.next();
-        JsonNode subNode = node.get(fieldName);
-        Object subObj = toObjectFromJSONNode(subNode);
+        final String fieldName = iter.next();
+        final JsonNode subNode = node.get(fieldName);
+        final Object subObj = toObjectFromJSONNode(subNode);
         obj.put(fieldName, subObj);
       }
 
       return obj;
     } else if (node.isArray()) {
-      ArrayList<Object> array = new ArrayList<Object>();
-      Iterator<JsonNode> iter = node.getElements();
+      final ArrayList<Object> array = new ArrayList<>();
+      final Iterator<JsonNode> iter = node.getElements();
       while (iter.hasNext()) {
-        JsonNode element = iter.next();
-        Object subObject = toObjectFromJSONNode(element);
+        final JsonNode element = iter.next();
+        final Object subObject = toObjectFromJSONNode(element);
         array.add(subObject);
       }
       return array;
@@ -170,7 +170,7 @@ public class JSONUtils {
     }
   }
 
-  public static long getLongFromObject(Object obj) {
+  public static long getLongFromObject(final Object obj) {
     if (obj instanceof Integer) {
       return Long.valueOf((Integer) obj);
     }
@@ -187,12 +187,12 @@ public class JSONUtils {
    *
    * The other json writing methods are more robust and will handle more cases.
    */
-  public static void writePropsNoJarDependency(Map<String, String> properties,
-      Writer writer) throws IOException {
+  public static void writePropsNoJarDependency(final Map<String, String> properties,
+      final Writer writer) throws IOException {
     writer.write("{\n");
     int size = properties.size();
 
-    for (Map.Entry<String, String> entry : properties.entrySet()) {
+    for (final Map.Entry<String, String> entry : properties.entrySet()) {
       // tab the space
       writer.write('\t');
       // Write key
@@ -210,57 +210,57 @@ public class JSONUtils {
     writer.write("}");
   }
 
-  private static String quoteAndClean(String str) {
+  private static String quoteAndClean(final String str) {
     if (str == null || str.isEmpty()) {
       return "\"\"";
     }
 
-    StringBuffer buffer = new StringBuffer(str.length());
+    final StringBuffer buffer = new StringBuffer(str.length());
     buffer.append('"');
     for (int i = 0; i < str.length(); ++i) {
-      char ch = str.charAt(i);
+      final char ch = str.charAt(i);
 
       switch (ch) {
-      case '\b':
-        buffer.append("\\b");
-        break;
-      case '\t':
-        buffer.append("\\t");
-        break;
-      case '\n':
-        buffer.append("\\n");
-        break;
-      case '\f':
-        buffer.append("\\f");
-        break;
-      case '\r':
-        buffer.append("\\r");
-        break;
-      case '"':
-      case '\\':
-      case '/':
-        buffer.append('\\');
-        buffer.append(ch);
-        break;
-      default:
-        if (isCharSpecialUnicode(ch)) {
-          buffer.append("\\u");
-          String hexCode = Integer.toHexString(ch);
-          int lengthHexCode = hexCode.length();
-          if (lengthHexCode < 4) {
-            buffer.append("0000".substring(0, 4 - lengthHexCode));
-          }
-          buffer.append(hexCode);
-        } else {
+        case '\b':
+          buffer.append("\\b");
+          break;
+        case '\t':
+          buffer.append("\\t");
+          break;
+        case '\n':
+          buffer.append("\\n");
+          break;
+        case '\f':
+          buffer.append("\\f");
+          break;
+        case '\r':
+          buffer.append("\\r");
+          break;
+        case '"':
+        case '\\':
+        case '/':
+          buffer.append('\\');
           buffer.append(ch);
-        }
+          break;
+        default:
+          if (isCharSpecialUnicode(ch)) {
+            buffer.append("\\u");
+            final String hexCode = Integer.toHexString(ch);
+            final int lengthHexCode = hexCode.length();
+            if (lengthHexCode < 4) {
+              buffer.append("0000".substring(0, 4 - lengthHexCode));
+            }
+            buffer.append(hexCode);
+          } else {
+            buffer.append(ch);
+          }
       }
     }
     buffer.append('"');
     return buffer.toString();
   }
 
-  private static boolean isCharSpecialUnicode(char ch) {
+  private static boolean isCharSpecialUnicode(final char ch) {
     if (ch < ' ') {
       return true;
     } else if (ch >= '\u0080' && ch < '\u00a0') {
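
The JSONUtils methods above wrap Jackson's tree model: parseJSONFromString() yields plain Maps, Lists, Strings and numbers rather than POJOs, and toJSON() serializes such objects back, optionally pretty-printed. A small round-trip sketch with illustrative values:

    import azkaban.utils.JSONUtils;
    import java.io.IOException;
    import java.util.Map;

    public class JsonRoundTripExample {
      public static void main(String[] args) throws IOException {
        String json = "{\"flow\": \"daily-report\", \"retries\": 3}";

        // parseJSONFromString() builds nested HashMaps/ArrayLists via toObjectFromJSONNode().
        @SuppressWarnings("unchecked")
        Map<String, Object> parsed = (Map<String, Object>) JSONUtils.parseJSONFromString(json);
        System.out.println(parsed.get("flow"));    // daily-report
        System.out.println(parsed.get("retries")); // 3

        // toJSON(obj, true) pretty-prints using Jackson's default pretty printer.
        System.out.println(JSONUtils.toJSON(parsed, true));
      }
    }
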
diff --git a/azkaban-common/src/main/java/azkaban/utils/LogGobbler.java b/azkaban-common/src/main/java/azkaban/utils/LogGobbler.java
index 7dcd5c0..1a83b53 100644
--- a/azkaban-common/src/main/java/azkaban/utils/LogGobbler.java
+++ b/azkaban-common/src/main/java/azkaban/utils/LogGobbler.java
@@ -16,16 +16,15 @@
 
 package azkaban.utils;
 
+import com.google.common.base.Joiner;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.Reader;
-
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
-import com.google.common.base.Joiner;
-
 public class LogGobbler extends Thread {
+
   private final BufferedReader inputReader;
   private final Logger logger;
   private final Level loggingLevel;
@@ -36,54 +35,54 @@ public class LogGobbler extends Thread {
     this.inputReader = new BufferedReader(inputReader);
     this.logger = logger;
     this.loggingLevel = level;
-    buffer = new CircularBuffer<String>(bufferLines);
+    this.buffer = new CircularBuffer<>(bufferLines);
   }
 
   @Override
   public void run() {
     try {
       while (!Thread.currentThread().isInterrupted()) {
-        String line = inputReader.readLine();
+        final String line = this.inputReader.readLine();
         if (line == null) {
           return;
         }
 
-        buffer.append(line);
+        this.buffer.append(line);
         log(line);
       }
-    } catch (IOException e) {
+    } catch (final IOException e) {
       error("Error reading from logging stream:", e);
     }
   }
 
-  private void log(String message) {
-    if (logger != null) {
-      logger.log(loggingLevel, message);
+  private void log(final String message) {
+    if (this.logger != null) {
+      this.logger.log(this.loggingLevel, message);
     }
   }
 
-  private void error(String message, Exception e) {
-    if (logger != null) {
-      logger.error(message, e);
+  private void error(final String message, final Exception e) {
+    if (this.logger != null) {
+      this.logger.error(message, e);
     }
   }
 
-  private void info(String message, Exception e) {
-    if (logger != null) {
-      logger.info(message, e);
+  private void info(final String message, final Exception e) {
+    if (this.logger != null) {
+      this.logger.info(message, e);
     }
   }
 
   public void awaitCompletion(final long waitMs) {
     try {
       join(waitMs);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       info("I/O thread interrupted.", e);
     }
   }
 
   public String getRecentLog() {
-    return Joiner.on(System.getProperty("line.separator")).join(buffer);
+    return Joiner.on(System.getProperty("line.separator")).join(this.buffer);
   }
 
 }
diff --git a/azkaban-common/src/main/java/azkaban/utils/Md5Hasher.java b/azkaban-common/src/main/java/azkaban/utils/Md5Hasher.java
index 5baaf7b..d0d75d8 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Md5Hasher.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Md5Hasher.java
@@ -1,12 +1,12 @@
 /*
  * Copyright 2012 LinkedIn Corp.
- * 
+ *
  * Licensed under the Apache License, Version 2.0 (the "License"); you may not
  * use this file except in compliance with the License. You may obtain a copy of
  * the License at
- * 
+ *
  * http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
@@ -35,21 +35,21 @@ public class Md5Hasher {
     MessageDigest digest = null;
     try {
       digest = MessageDigest.getInstance("MD5");
-    } catch (NoSuchAlgorithmException e) {
+    } catch (final NoSuchAlgorithmException e) {
       // Should never get here.
     }
 
     return digest;
   }
 
-  public static byte[] md5Hash(File file) throws IOException {
-    MessageDigest digest = getMd5Digest();
+  public static byte[] md5Hash(final File file) throws IOException {
+    final MessageDigest digest = getMd5Digest();
 
-    FileInputStream fStream = new FileInputStream(file);
-    BufferedInputStream bStream = new BufferedInputStream(fStream);
-    DigestInputStream blobStream = new DigestInputStream(bStream, digest);
+    final FileInputStream fStream = new FileInputStream(file);
+    final BufferedInputStream bStream = new BufferedInputStream(fStream);
+    final DigestInputStream blobStream = new DigestInputStream(bStream, digest);
 
-    byte[] buffer = new byte[BYTE_BUFFER_SIZE];
+    final byte[] buffer = new byte[BYTE_BUFFER_SIZE];
 
     int num = 0;
     do {
diff --git a/azkaban-common/src/main/java/azkaban/utils/MessageAttachmentExceededMaximumSizeException.java b/azkaban-common/src/main/java/azkaban/utils/MessageAttachmentExceededMaximumSizeException.java
index 344bc86..bd2261f 100644
--- a/azkaban-common/src/main/java/azkaban/utils/MessageAttachmentExceededMaximumSizeException.java
+++ b/azkaban-common/src/main/java/azkaban/utils/MessageAttachmentExceededMaximumSizeException.java
@@ -4,9 +4,8 @@ import javax.mail.MessagingException;
 
 /**
  * To indicate the attachment size is larger than allowed size
- * 
- * @author hluu
  *
+ * @author hluu
  */
 public class MessageAttachmentExceededMaximumSizeException extends
     MessagingException {
@@ -15,11 +14,11 @@ public class MessageAttachmentExceededMaximumSizeException extends
     super();
   }
 
-  public MessageAttachmentExceededMaximumSizeException(String s) {
+  public MessageAttachmentExceededMaximumSizeException(final String s) {
     super(s);
   }
 
-  public MessageAttachmentExceededMaximumSizeException(String s, Exception e) {
+  public MessageAttachmentExceededMaximumSizeException(final String s, final Exception e) {
     super(s, e);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/utils/OsMemoryUtil.java b/azkaban-common/src/main/java/azkaban/utils/OsMemoryUtil.java
index 1b52522..5c46789 100644
--- a/azkaban-common/src/main/java/azkaban/utils/OsMemoryUtil.java
+++ b/azkaban-common/src/main/java/azkaban/utils/OsMemoryUtil.java
@@ -18,16 +18,20 @@ import org.slf4j.LoggerFactory;
  * This check is designed for Linux only.
  */
 class OsMemoryUtil {
+
   private static final Logger logger = LoggerFactory.getLogger(OsMemoryUtil.class);
 
   // This file is used by Linux. It doesn't exist on Mac for example.
   private static final String MEM_INFO_FILE = "/proc/meminfo";
 
-  private static final Set<String> MEM_KEYS = ImmutableSet.of("MemFree", "Buffers", "Cached", "SwapFree");
+  private static final Set<String> MEM_KEYS = ImmutableSet
+      .of("MemFree", "Buffers", "Cached", "SwapFree");
 
   /**
    * Includes OS cache and free swap.
-   * @return the total free memory size of the OS. 0 if there is an error or the OS doesn't support this memory check.
+   *
+   * @return the total free memory size of the OS. 0 if there is an error or the OS doesn't support
+   * this memory check.
    */
   long getOsTotalFreeMemorySize() {
     if (!Files.isRegularFile(Paths.get(MEM_INFO_FILE))) {
@@ -35,13 +39,13 @@ class OsMemoryUtil {
       return 0;
     }
 
-    List<String> lines;
+    final List<String> lines;
     // The file /proc/meminfo is assumed to contain only ASCII characters.
     // The assumption is that the file is not too big. So it is simpler to read the whole file into memory.
     try {
       lines = Files.readAllLines(Paths.get(MEM_INFO_FILE), StandardCharsets.UTF_8);
-    } catch (IOException e) {
-      String errMsg = "Failed to open mem info file: " + MEM_INFO_FILE;
+    } catch (final IOException e) {
+      final String errMsg = "Failed to open mem info file: " + MEM_INFO_FILE;
       logger.error(errMsg, e);
       return 0;
     }
@@ -49,19 +53,18 @@ class OsMemoryUtil {
   }
 
   /**
-   *
    * @param lines text lines from the procinfo file
    * @return the total size of free memory in kB. 0 if there is an error.
    */
-  long getOsTotalFreeMemorySizeFromStrings(List<String> lines) {
+  long getOsTotalFreeMemorySizeFromStrings(final List<String> lines) {
     long totalFree = 0;
     int count = 0;
 
-    for (String line : lines) {
-      for (String keyName : MEM_KEYS) {
+    for (final String line : lines) {
+      for (final String keyName : MEM_KEYS) {
         if (line.startsWith(keyName)) {
           count++;
-          long size = parseMemoryLine(line);
+          final long size = parseMemoryLine(line);
           if (size == 0) {
             return 0;
           }
@@ -70,9 +73,10 @@ class OsMemoryUtil {
       }
     }
 
-    int length = MEM_KEYS.size();
+    final int length = MEM_KEYS.size();
     if (count != length) {
-      String errMsg = String.format("Expect %d keys in the meminfo file. Got %d. content: %s", length, count, lines);
+      final String errMsg = String
+          .format("Expect %d keys in the meminfo file. Got %d. content: %s", length, count, lines);
       logger.error(errMsg);
       totalFree = 0;
     }
@@ -82,26 +86,26 @@ class OsMemoryUtil {
   /**
    * Example file:
    * $ cat /proc/meminfo
-   *   MemTotal:       65894008 kB
-   *   MemFree:        59400536 kB
-   *   Buffers:          409348 kB
-   *   Cached:          4290236 kB
-   *   SwapCached:            0 kB
+   * MemTotal:       65894008 kB
+   * MemFree:        59400536 kB
+   * Buffers:          409348 kB
+   * Cached:          4290236 kB
+   * SwapCached:            0 kB
    *
    * Make the method package private to make unit testing easier.
    * Otherwise it can be made private.
-
+   *
    * @param line the text for a memory usage statistics we are interested in
    * @return size of the memory. unit kB. 0 if there is an error.
    */
-  long parseMemoryLine(String line) {
-    int idx1 = line.indexOf(":");
-    int idx2 = line.lastIndexOf("kB");
-    String sizeString = line.substring(idx1 + 1, idx2 - 1).trim();
+  long parseMemoryLine(final String line) {
+    final int idx1 = line.indexOf(":");
+    final int idx2 = line.lastIndexOf("kB");
+    final String sizeString = line.substring(idx1 + 1, idx2 - 1).trim();
     try {
       return Long.parseLong(sizeString);
-    } catch (NumberFormatException e) {
-      String err = "Failed to parse the meminfo file. Line: " + line;
+    } catch (final NumberFormatException e) {
+      final String err = "Failed to parse the meminfo file. Line: " + line;
       logger.error(err);
       return 0;
     }
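
parseMemoryLine() relies on the fixed "Key:   <value> kB" shape of /proc/meminfo lines: it takes the text between the colon and the trailing "kB", trims it, and parses it as a long. Since the method is package-private, the sketch below simply restates those steps on a line taken from the javadoc sample above:

    public class MemInfoLineExample {
      public static void main(String[] args) {
        // A line in the shape shown in the OsMemoryUtil javadoc sample.
        String line = "MemFree:        59400536 kB";

        // Same steps as parseMemoryLine(): slice between ':' and "kB", trim, parse.
        int idx1 = line.indexOf(":");
        int idx2 = line.lastIndexOf("kB");
        String sizeString = line.substring(idx1 + 1, idx2 - 1).trim();
        long sizeKb = Long.parseLong(sizeString);

        System.out.println(sizeKb); // 59400536
      }
    }
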
diff --git a/azkaban-common/src/main/java/azkaban/utils/Pair.java b/azkaban-common/src/main/java/azkaban/utils/Pair.java
index fddf7fa..3ab7f1b 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Pair.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Pair.java
@@ -18,61 +18,65 @@ package azkaban.utils;
 
 /**
  * Pair of values.
- *
- * @param <F>
- * @param <S>
  */
 public class Pair<F, S> {
+
   private final F first;
   private final S second;
 
-  public Pair(F first, S second) {
+  public Pair(final F first, final S second) {
     this.first = first;
     this.second = second;
   }
 
   public F getFirst() {
-    return first;
+    return this.first;
   }
 
   public S getSecond() {
-    return second;
+    return this.second;
   }
 
   @Override
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((first == null) ? 0 : first.hashCode());
-    result = prime * result + ((second == null) ? 0 : second.hashCode());
+    result = prime * result + ((this.first == null) ? 0 : this.first.hashCode());
+    result = prime * result + ((this.second == null) ? 0 : this.second.hashCode());
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (getClass() != obj.getClass())
+    }
+    if (getClass() != obj.getClass()) {
       return false;
-    @SuppressWarnings("rawtypes")
-    Pair other = (Pair) obj;
-    if (first == null) {
-      if (other.first != null)
+    }
+    final Pair other = (Pair) obj;
+    if (this.first == null) {
+      if (other.first != null) {
         return false;
-    } else if (!first.equals(other.first))
+      }
+    } else if (!this.first.equals(other.first)) {
       return false;
-    if (second == null) {
-      if (other.second != null)
+    }
+    if (this.second == null) {
+      if (other.second != null) {
         return false;
-    } else if (!second.equals(other.second))
+      }
+    } else if (!this.second.equals(other.second)) {
       return false;
+    }
     return true;
   }
 
   @Override
   public String toString() {
-    return "{" + first.toString() + "," + second.toString() + "}";
+    return "{" + this.first.toString() + "," + this.second.toString() + "}";
   }
 }
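
Pair is a plain generic value holder with null-safe, value-based equals and hashCode. A minimal usage sketch, assuming azkaban-common is on the classpath; the keys and values are illustrative:

// Minimal usage sketch for azkaban.utils.Pair; PairDemo is not part of Azkaban.
import azkaban.utils.Pair;

public class PairDemo {

  public static void main(final String[] args) {
    final Pair<String, Integer> a = new Pair<>("port", 8081);
    final Pair<String, Integer> b = new Pair<>("port", 8081);

    // equals/hashCode compare the stored values, so equal contents compare equal.
    System.out.println(a.equals(b));                  // true
    System.out.println(a.hashCode() == b.hashCode()); // true
    System.out.println(a);                            // {port,8081}
  }
}
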
diff --git a/azkaban-common/src/main/java/azkaban/utils/PatternLayoutEscaped.java b/azkaban-common/src/main/java/azkaban/utils/PatternLayoutEscaped.java
index 707c33a..2afe703 100644
--- a/azkaban-common/src/main/java/azkaban/utils/PatternLayoutEscaped.java
+++ b/azkaban-common/src/main/java/azkaban/utils/PatternLayoutEscaped.java
@@ -4,13 +4,14 @@ import org.apache.log4j.PatternLayout;
 import org.apache.log4j.spi.LoggingEvent;
 
 /**
- * When we use the log4j Kafka appender, it seems that the appender simply does not log the stack trace anywhere
- * Seeing as the stack trace is a very important piece of information, we create our own PatternLayout class that
- * appends the stack trace to the log message that reported it, so that all the information regarding that error
- * can be found one in place.
+ * When we use the log4j Kafka appender, it seems that the appender simply does not log the stack
+ * trace anywhere. Since the stack trace is a very important piece of information, we create our
+ * own PatternLayout class that appends the stack trace to the log message that reported it, so that
+ * all the information regarding that error can be found in one place.
  */
 public class PatternLayoutEscaped extends PatternLayout {
-  public PatternLayoutEscaped(String s) {
+
+  public PatternLayoutEscaped(final String s) {
     super(s);
   }
 
@@ -27,16 +28,16 @@ public class PatternLayoutEscaped extends PatternLayout {
   }
 
   /**
-   * Create a copy of event, but append a stack trace to the message (if it exists).
-   * Then it escapes the backslashes, tabs, newlines and quotes in its message as we are sending it as JSON and we
+   * Create a copy of event, but append a stack trace to the message (if it exists). Then it escapes
+   * the backslashes, tabs, newlines and quotes in its message as we are sending it as JSON and we
    * don't want any corruption of the JSON object.
    */
-  private LoggingEvent appendStackTraceToEvent(LoggingEvent event) {
+  private LoggingEvent appendStackTraceToEvent(final LoggingEvent event) {
     String message = event.getMessage().toString();
     // If there is a stack trace available, print it out
     if (event.getThrowableInformation() != null) {
-      String[] s = event.getThrowableStrRep();
-      for (String line: s) {
+      final String[] s = event.getThrowableStrRep();
+      for (final String line : s) {
         message += "\n" + line;
       }
     }
@@ -46,7 +47,7 @@ public class PatternLayoutEscaped extends PatternLayout {
         .replace("\"", "\\\"")
         .replace("\t", "\\t");
 
-    Throwable throwable = event.getThrowableInformation() == null ? null
+    final Throwable throwable = event.getThrowableInformation() == null ? null
         : event.getThrowableInformation().getThrowable();
     return new LoggingEvent(event.getFQNOfLoggerClass(),
         event.getLogger(),
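
As the Javadoc above describes, the layout appends the throwable's string representation to the message and then escapes backslashes, newlines, quotes, and tabs before the line is shipped as JSON. Only part of the replacement chain is visible in this hunk, so the sketch below is an assumption about one reasonable order (backslashes first, so later escapes are not double-escaped); EscapeDemo is not part of Azkaban.

// Hypothetical sketch of the JSON-escaping step; the escape order here is an assumption.
public class EscapeDemo {

  static String escapeForJson(final String message) {
    return message
        .replace("\\", "\\\\")   // escape backslashes first so later escapes are not re-escaped
        .replace("\n", "\\n")
        .replace("\"", "\\\"")
        .replace("\t", "\\t");
  }

  public static void main(final String[] args) {
    final String raw = "error: \"disk full\"\n\tat Foo.bar(Foo.java:42)";
    // Prints: error: \"disk full\"\n\tat Foo.bar(Foo.java:42)
    System.out.println(escapeForJson(raw));
  }
}
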
diff --git a/azkaban-common/src/main/java/azkaban/utils/Props.java b/azkaban-common/src/main/java/azkaban/utils/Props.java
index 68fcbd3..d0fc84c 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Props.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Props.java
@@ -35,7 +35,6 @@ import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.TreeMap;
-
 import org.apache.log4j.Logger;
 
 /**
@@ -43,6 +42,7 @@ import org.apache.log4j.Logger;
  * functions and Exception throwing. This class is not threadsafe.
  */
 public class Props {
+
   private final Map<String, String> _current;
   private Props _parent;
   private String source = null;
@@ -56,40 +56,30 @@ public class Props {
 
   /**
    * Constructor for empty Props with parent override.
-   *
-   * @param parent
    */
-  public Props(Props parent) {
-    this._current = new HashMap<String, String>();
+  public Props(final Props parent) {
+    this._current = new HashMap<>();
     this._parent = parent;
   }
 
   /**
    * Load props from a file.
-   *
-   * @param parent
-   * @param file
-   * @throws IOException
    */
-  public Props(Props parent, String filepath) throws IOException {
+  public Props(final Props parent, final String filepath) throws IOException {
     this(parent, new File(filepath));
   }
 
   /**
    * Load props from a file.
-   *
-   * @param parent
-   * @param file
-   * @throws IOException
    */
-  public Props(Props parent, File file) throws IOException {
+  public Props(final Props parent, final File file) throws IOException {
     this(parent);
     setSource(file.getPath());
 
-    InputStream input = new BufferedInputStream(new FileInputStream(file));
+    final InputStream input = new BufferedInputStream(new FileInputStream(file));
     try {
       loadFrom(input);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw e;
     } finally {
       input.close();
@@ -98,34 +88,16 @@ public class Props {
 
   /**
    * Create props from property input streams
-   *
-   * @param parent
-   * @param inputStreams
-   * @throws IOException
    */
-  public Props(Props parent, InputStream inputStream) throws IOException {
+  public Props(final Props parent, final InputStream inputStream) throws IOException {
     this(parent);
     loadFrom(inputStream);
   }
 
   /**
-   *
-   * @param inputStream
-   * @throws IOException
-   */
-  private void loadFrom(InputStream inputStream) throws IOException {
-    Properties properties = new Properties();
-    properties.load(inputStream);
-    this.put(properties);
-  }
-
-  /**
    * Create properties from maps of properties
-   *
-   * @param parent
-   * @param props
    */
-  public Props(Props parent, Map<String, String>... props) {
+  public Props(final Props parent, final Map<String, String>... props) {
     this(parent);
     for (int i = props.length - 1; i >= 0; i--) {
       this.putAll(props[i]);
@@ -134,11 +106,8 @@ public class Props {
 
   /**
    * Create properties from Properties objects
-   *
-   * @param parent
-   * @param properties
    */
-  public Props(Props parent, Properties... properties) {
+  public Props(final Props parent, final Properties... properties) {
     this(parent);
     for (int i = properties.length - 1; i >= 0; i--) {
       this.put(properties[i]);
@@ -147,56 +116,33 @@ public class Props {
 
   /**
    * Create a Props object with the contents set to that of props.
-   *
-   * @param parent
-   * @param props
    */
-  public Props(Props parent, Props props) {
+  public Props(final Props parent, final Props props) {
     this(parent);
     if (props != null) {
       putAll(props);
     }
   }
 
-  public void setEarliestAncestor(Props parent) {
-    Props props = getEarliestAncestor();
-    props.setParent(parent);
-  }
-
-  public Props getEarliestAncestor() {
-    if (_parent == null) {
-      return this;
-    }
-
-    return _parent.getEarliestAncestor();
-  }
-
   /**
    * Create a Props with a null parent from a list of key value pairing. i.e.
    * [key1, value1, key2, value2 ...]
-   *
-   * @param args
-   * @return
    */
-  public static Props of(String... args) {
+  public static Props of(final String... args) {
     return of((Props) null, args);
   }
 
   /**
    * Create a Props from a list of key value pairing. i.e. [key1, value1, key2,
    * value2 ...]
-   *
-   * @param args
-   * @return
    */
-  @SuppressWarnings("unchecked")
-  public static Props of(Props parent, String... args) {
+  public static Props of(final Props parent, final String... args) {
     if (args.length % 2 != 0) {
       throw new IllegalArgumentException(
           "Must have an equal number of keys and values.");
     }
 
-    Map<String, String> vals = new HashMap<String, String>(args.length / 2);
+    final Map<String, String> vals = new HashMap<>(args.length / 2);
 
     for (int i = 0; i < args.length; i += 2) {
       vals.put(args[i], args[i + 1]);
@@ -205,45 +151,85 @@ public class Props {
   }
 
   /**
+   * Clones the Props p object and all of its parents.
+   */
+  public static Props clone(final Props p) {
+    return copyNext(p);
+  }
+
+  /**
+   *
+   * @param source
+   * @return
+   */
+  private static Props copyNext(final Props source) {
+    Props priorNodeCopy = null;
+    if (source.getParent() != null) {
+      priorNodeCopy = copyNext(source.getParent());
+    }
+    final Props dest = new Props(priorNodeCopy);
+    for (final String key : source.localKeySet()) {
+      dest.put(key, source.get(key));
+    }
+
+    return dest;
+  }
+
+  /**
+   *
+   * @param inputStream
+   * @throws IOException
+   */
+  private void loadFrom(final InputStream inputStream) throws IOException {
+    final Properties properties = new Properties();
+    properties.load(inputStream);
+    this.put(properties);
+  }
+
+  public Props getEarliestAncestor() {
+    if (this._parent == null) {
+      return this;
+    }
+
+    return this._parent.getEarliestAncestor();
+  }
+
+  public void setEarliestAncestor(final Props parent) {
+    final Props props = getEarliestAncestor();
+    props.setParent(parent);
+  }
+
+  /**
    * Clear the current Props, but leaves the parent untouched.
    */
   public void clearLocal() {
-    _current.clear();
+    this._current.clear();
   }
 
   /**
    * Check key in current Props then search in parent
-   *
-   * @param k
-   * @return
    */
-  public boolean containsKey(Object k) {
-    return _current.containsKey(k)
-        || (_parent != null && _parent.containsKey(k));
+  public boolean containsKey(final Object k) {
+    return this._current.containsKey(k)
+        || (this._parent != null && this._parent.containsKey(k));
   }
 
   /**
    * Check value in current Props then search in parent
-   *
-   * @param value
-   * @return
    */
-  public boolean containsValue(Object value) {
-    return _current.containsValue(value)
-        || (_parent != null && _parent.containsValue(value));
+  public boolean containsValue(final Object value) {
+    return this._current.containsValue(value)
+        || (this._parent != null && this._parent.containsValue(value));
   }
 
   /**
    * Return value if available in current Props otherwise return from parent
-   *
-   * @param key
-   * @return
    */
-  public String get(Object key) {
-    if (_current.containsKey(key)) {
-      return _current.get(key);
-    } else if (_parent != null) {
-      return _parent.get(key);
+  public String get(final Object key) {
+    if (this._current.containsKey(key)) {
+      return this._current.get(key);
+    } else if (this._parent != null) {
+      return this._parent.get(key);
     } else {
       return null;
     }
@@ -251,20 +237,20 @@ public class Props {
 
   /**
    * Get the key set from the current Props
-   *
-   * @return
    */
   public Set<String> localKeySet() {
-    return _current.keySet();
+    return this._current.keySet();
   }
 
   /**
    * Get parent Props
-   *
-   * @return
    */
   public Props getParent() {
-    return _parent;
+    return this._parent;
+  }
+
+  public void setParent(final Props prop) {
+    this._parent = prop;
   }
 
   /**
@@ -274,12 +260,11 @@ public class Props {
    *
    * @param key The key to put the value to
    * @param value The value to do substitution on and store
-   *
-   * @throws IllegalArgumentException If the variable given for substitution is
-   *           not a valid key in this Props.
+   * @throws IllegalArgumentException If the variable given for substitution is not a valid key in
+   * this Props.
    */
-  public String put(String key, String value) {
-    return _current.put(key, value);
+  public String put(final String key, final String value) {
+    return this._current.put(key, value);
   }
 
   /**
@@ -289,98 +274,76 @@ public class Props {
    * Properties object.
    *
    * @param properties The properties to put
-   *
-   * @throws IllegalArgumentException If the variable given for substitution is
-   *           not a valid key in this Props.
+   * @throws IllegalArgumentException If the variable given for substitution is not a valid key in
+   * this Props.
    */
-  public void put(Properties properties) {
-    for (String propName : properties.stringPropertyNames()) {
-      _current.put(propName, properties.getProperty(propName));
+  public void put(final Properties properties) {
+    for (final String propName : properties.stringPropertyNames()) {
+      this._current.put(propName, properties.getProperty(propName));
     }
   }
 
   /**
    * Put integer
-   *
-   * @param key
-   * @param value
-   * @return
    */
-  public String put(String key, Integer value) {
-    return _current.put(key, value.toString());
+  public String put(final String key, final Integer value) {
+    return this._current.put(key, value.toString());
   }
 
   /**
    * Put Long. Stores as String.
-   *
-   * @param key
-   * @param value
-   * @return
    */
-  public String put(String key, Long value) {
-    return _current.put(key, value.toString());
+  public String put(final String key, final Long value) {
+    return this._current.put(key, value.toString());
   }
 
   /**
    * Put Double. Stores as String.
-   *
-   * @param key
-   * @param value
-   * @return
    */
-  public String put(String key, Double value) {
-    return _current.put(key, value.toString());
+  public String put(final String key, final Double value) {
+    return this._current.put(key, value.toString());
   }
 
   /**
    * Put everything in the map into the props.
-   *
-   * @param m
    */
-  public void putAll(Map<? extends String, ? extends String> m) {
+  public void putAll(final Map<? extends String, ? extends String> m) {
     if (m == null) {
       return;
     }
 
-    for (Map.Entry<? extends String, ? extends String> entry : m.entrySet()) {
+    for (final Map.Entry<? extends String, ? extends String> entry : m.entrySet()) {
       this.put(entry.getKey(), entry.getValue());
     }
   }
 
   /**
    * Put all properties in the props into the current props. Will handle null p.
-   *
-   * @param p
    */
-  public void putAll(Props p) {
+  public void putAll(final Props p) {
     if (p == null) {
       return;
     }
 
-    for (String key : p.getKeySet()) {
+    for (final String key : p.getKeySet()) {
       this.put(key, p.get(key));
     }
   }
 
   /**
    * Puts only the local props from p into the current properties
-   *
-   * @param p
    */
-  public void putLocal(Props p) {
-    for (String key : p.localKeySet()) {
+  public void putLocal(final Props p) {
+    for (final String key : p.localKeySet()) {
       this.put(key, p.get(key));
     }
   }
 
   /**
    * Remove only the local value of key s, and not the parents.
-   *
-   * @param s
-   * @return
    */
-  public String removeLocal(Object s) {
-    return _current.remove(s);
+  public String removeLocal(final Object s) {
+    return this._current.remove(s);
   }
 
   /**
@@ -395,17 +358,14 @@ public class Props {
    * parent Props are not counted)
    */
   public int localSize() {
-    return _current.size();
+    return this._current.size();
   }
 
   /**
    * Attempts to return the Class that corresponds to the Props value. If the
    * class doesn't exist, an IllegalArgumentException will be thrown.
-   *
-   * @param key
-   * @return
    */
-  public Class<?> getClass(String key) {
+  public Class<?> getClass(final String key) {
     try {
       if (containsKey(key)) {
         return Class.forName(get(key));
@@ -413,12 +373,12 @@ public class Props {
         throw new UndefinedPropertyException("Missing required property '"
             + key + "'");
       }
-    } catch (ClassNotFoundException e) {
+    } catch (final ClassNotFoundException e) {
       throw new IllegalArgumentException(e);
     }
   }
 
-  public Class<?> getClass(String key, boolean initialize, ClassLoader cl) {
+  public Class<?> getClass(final String key, final boolean initialize, final ClassLoader cl) {
     try {
       if (containsKey(key)) {
         return Class.forName(get(key), initialize, cl);
@@ -426,7 +386,7 @@ public class Props {
         throw new UndefinedPropertyException("Missing required property '"
             + key + "'");
       }
-    } catch (ClassNotFoundException e) {
+    } catch (final ClassNotFoundException e) {
       throw new IllegalArgumentException(e);
     }
   }
@@ -434,12 +394,8 @@ public class Props {
   /**
    * Gets the class from the Props. If it doesn't exist, it will return the
    * defaultClass
-   *
-   * @param key
-   * @param c
-   * @return
    */
-  public Class<?> getClass(String key, Class<?> defaultClass) {
+  public Class<?> getClass(final String key, final Class<?> defaultClass) {
     if (containsKey(key)) {
       return getClass(key);
     } else {
@@ -450,12 +406,8 @@ public class Props {
   /**
    * Gets the string from the Props. If it doesn't exist, it will return the
    * defaultValue
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public String getString(String key, String defaultValue) {
+  public String getString(final String key, final String defaultValue) {
     if (containsKey(key)) {
       return get(key);
     } else {
@@ -466,12 +418,8 @@ public class Props {
   /**
    * Gets the string from the Props. If it doesn't exist, throws an
    * UndefinedPropertyException
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public String getString(String key) {
+  public String getString(final String key) {
     if (containsKey(key)) {
       return get(key);
     } else {
@@ -482,23 +430,16 @@ public class Props {
 
   /**
    * Returns a list of strings with the comma as the separator of the value
-   *
-   * @param key
-   * @return
    */
-  public List<String> getStringList(String key) {
+  public List<String> getStringList(final String key) {
     return getStringList(key, "\\s*,\\s*");
   }
 
   /**
    * Returns a list of strings with the sep as the separator of the value
-   *
-   * @param key
-   * @param sep
-   * @return
    */
-  public List<String> getStringList(String key, String sep) {
-    String val = get(key);
+  public List<String> getStringList(final String key, final String sep) {
+    final String val = get(key);
     if (val == null || val.trim().length() == 0) {
       return Collections.emptyList();
     }
@@ -514,11 +455,8 @@ public class Props {
   /**
    * Returns a list of strings with the comma as the separator of the value. If
    * the value is null, it'll return the defaultValue.
-   *
-   * @param key
-   * @return
    */
-  public List<String> getStringList(String key, List<String> defaultValue) {
+  public List<String> getStringList(final String key, final List<String> defaultValue) {
     if (containsKey(key)) {
       return getStringList(key);
     } else {
@@ -529,12 +467,9 @@ public class Props {
   /**
    * Returns a list of strings with the sep as the separator of the value. If
    * the value is null, it'll return the defaultValue.
-   *
-   * @param key
-   * @return
    */
-  public List<String> getStringList(String key, List<String> defaultValue,
-      String sep) {
+  public List<String> getStringList(final String key, final List<String> defaultValue,
+      final String sep) {
     if (containsKey(key)) {
       return getStringList(key, sep);
     } else {
@@ -545,12 +480,8 @@ public class Props {
   /**
    * Returns true if the value equals "true". If the value is null, then the
    * default value is returned.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public boolean getBoolean(String key, boolean defaultValue) {
+  public boolean getBoolean(final String key, final boolean defaultValue) {
     if (containsKey(key)) {
       return "true".equalsIgnoreCase(get(key).trim());
     } else {
@@ -561,28 +492,22 @@ public class Props {
   /**
    * Returns true if the value equals "true". If the value is null, then an
    * UndefinedPropertyException is thrown.
-   *
-   * @param key
-   * @return
    */
-  public boolean getBoolean(String key) {
-    if (containsKey(key))
+  public boolean getBoolean(final String key) {
+    if (containsKey(key)) {
       return "true".equalsIgnoreCase(get(key));
-    else
+    } else {
       throw new UndefinedPropertyException("Missing required property '" + key
           + "'");
+    }
   }
 
   /**
    * Returns the long representation of the value. If the value is null, then
    * the default value is returned. If the value isn't a long, then a parse
    * exception will be thrown.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public long getLong(String name, long defaultValue) {
+  public long getLong(final String name, final long defaultValue) {
     if (containsKey(name)) {
       return Long.parseLong(get(name));
     } else {
@@ -594,11 +519,8 @@ public class Props {
    * Returns the long representation of the value. If the value is null, then an
    * UndefinedPropertyException will be thrown. If the value isn't a long, then
    * a parse exception will be thrown.
-   *
-   * @param key
-   * @return
    */
-  public long getLong(String name) {
+  public long getLong(final String name) {
     if (containsKey(name)) {
       return Long.parseLong(get(name));
     } else {
@@ -611,12 +533,8 @@ public class Props {
    * Returns the int representation of the value. If the value is null, then the
    * default value is returned. If the value isn't an int, then a parse exception
    * will be thrown.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public int getInt(String name, int defaultValue) {
+  public int getInt(final String name, final int defaultValue) {
     if (containsKey(name)) {
       return Integer.parseInt(get(name).trim());
     } else {
@@ -628,11 +546,8 @@ public class Props {
    * Returns the int representation of the value. If the value is null, then an
    * UndefinedPropertyException will be thrown. If the value isn't an int, then a
    * parse exception will be thrown.
-   *
-   * @param key
-   * @return
    */
-  public int getInt(String name) {
+  public int getInt(final String name) {
     if (containsKey(name)) {
       return Integer.parseInt(get(name).trim());
     } else {
@@ -645,12 +560,8 @@ public class Props {
    * Returns the double representation of the value. If the value is null, then
    * the default value is returned. If the value isn't a double, then a parse
    * exception will be thrown.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public double getDouble(String name, double defaultValue) {
+  public double getDouble(final String name, final double defaultValue) {
     if (containsKey(name)) {
       return Double.parseDouble(get(name).trim());
     } else {
@@ -662,11 +573,8 @@ public class Props {
    * Returns the double representation of the value. If the value is null, then
    * an UndefinedPropertyException will be thrown. If the value isn't a double,
    * then a parse exception will be thrown.
-   *
-   * @param key
-   * @return
    */
-  public double getDouble(String name) {
+  public double getDouble(final String name) {
     if (containsKey(name)) {
       return Double.parseDouble(get(name).trim());
     } else {
@@ -679,16 +587,12 @@ public class Props {
    * Returns the URI representation of the value. If the value is null, then the
    * default value is returned. If the value isn't a URI, then an
    * IllegalArgumentException will be thrown.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public URI getUri(String name) {
+  public URI getUri(final String name) {
     if (containsKey(name)) {
       try {
         return new URI(get(name));
-      } catch (URISyntaxException e) {
+      } catch (final URISyntaxException e) {
         throw new IllegalArgumentException(e.getMessage());
       }
     } else {
@@ -701,12 +605,8 @@ public class Props {
    * Returns the URI representation of the value. If the value is null, then
    * the default value is returned. If the value isn't a URI, then an
    * IllegalArgumentException will be thrown.
-   *
-   * @param key
-   * @param defaultValue
-   * @return
    */
-  public URI getUri(String name, URI defaultValue) {
+  public URI getUri(final String name, final URI defaultValue) {
     if (containsKey(name)) {
       return getUri(name);
     } else {
@@ -714,10 +614,10 @@ public class Props {
     }
   }
 
-  public URI getUri(String name, String defaultValue) {
+  public URI getUri(final String name, final String defaultValue) {
     try {
       return getUri(name, new URI(defaultValue));
-    } catch (URISyntaxException e) {
+    } catch (final URISyntaxException e) {
       throw new IllegalArgumentException(e.getMessage());
     }
   }
@@ -728,8 +628,8 @@ public class Props {
    * @param file The file to write to
    * @throws IOException If the file can't be found or there is an io error
    */
-  public void storeLocal(File file) throws IOException {
-    BufferedOutputStream out =
+  public void storeLocal(final File file) throws IOException {
+    final BufferedOutputStream out =
         new BufferedOutputStream(new FileOutputStream(file));
     try {
       storeLocal(out);
@@ -740,12 +640,9 @@ public class Props {
 
   /**
    * Returns a copy of only the local values of this props
-   *
-   * @return
    */
-  @SuppressWarnings("unchecked")
   public Props local() {
-    return new Props(null, _current);
+    return new Props(null, this._current);
   }
 
   /**
@@ -754,9 +651,9 @@ public class Props {
    * @param out The output stream to write to
    * @throws IOException If the file can't be found or there is an io error
    */
-  public void storeLocal(OutputStream out) throws IOException {
-    Properties p = new Properties();
-    for (String key : _current.keySet()) {
+  public void storeLocal(final OutputStream out) throws IOException {
+    final Properties p = new Properties();
+    for (final String key : this._current.keySet()) {
       p.setProperty(key, get(key));
     }
     p.store(out, null);
@@ -764,12 +661,10 @@ public class Props {
 
   /**
    * Returns a java.util.Properties file populated with the stuff in here.
-   *
-   * @return
    */
   public Properties toProperties() {
-    Properties p = new Properties();
-    for (String key : _current.keySet()) {
+    final Properties p = new Properties();
+    for (final String key : this._current.keySet()) {
       p.setProperty(key, get(key));
     }
 
@@ -782,8 +677,8 @@ public class Props {
    * @param file The file to store to
    * @throws IOException If there is an error writing
    */
-  public void storeFlattened(File file) throws IOException {
-    BufferedOutputStream out =
+  public void storeFlattened(final File file) throws IOException {
+    final BufferedOutputStream out =
         new BufferedOutputStream(new FileOutputStream(file));
     try {
       storeFlattened(out);
@@ -798,10 +693,10 @@ public class Props {
    * @param out The stream to write to
    * @throws IOException If there is an error writing
    */
-  public void storeFlattened(OutputStream out) throws IOException {
-    Properties p = new Properties();
+  public void storeFlattened(final OutputStream out) throws IOException {
+    final Properties p = new Properties();
     for (Props curr = this; curr != null; curr = curr.getParent()) {
-      for (String key : curr.localKeySet()) {
+      for (final String key : curr.localKeySet()) {
         if (!p.containsKey(key)) {
           p.setProperty(key, get(key));
         }
@@ -812,16 +707,15 @@ public class Props {
   }
 
   /**
-   * Returns a map of all the flattened properties, the item in the returned map is sorted alphabetically
-   * by the key value.
-   *
+   * Returns a map of all the flattened properties; the items in the returned map are sorted
+   * alphabetically by the key value.
    *
    * @return
    */
-  public Map<String,String> getFlattened(){
-    TreeMap<String,String> returnVal = new TreeMap<String,String>(); 
+  public Map<String, String> getFlattened() {
+    final TreeMap<String, String> returnVal = new TreeMap<>();
     returnVal.putAll(getMapByPrefix(""));
-    return returnVal; 
+    return returnVal;
   }
 
   /**
@@ -829,12 +723,12 @@ public class Props {
    *
    * @param prefix The string prefix
    */
-  public Map<String, String> getMapByPrefix(String prefix) {
-    Map<String, String> values = _parent == null ? new HashMap<String, String>():
-                                                   _parent.getMapByPrefix(prefix);
+  public Map<String, String> getMapByPrefix(final String prefix) {
+    final Map<String, String> values = this._parent == null ? new HashMap<>() :
+        this._parent.getMapByPrefix(prefix);
 
     // when there is a conflict, value from the child takes the priority.
-    for (String key : this.localKeySet()) {
+    for (final String key : this.localKeySet()) {
       if (key.startsWith(prefix)) {
         values.put(key.substring(prefix.length()), get(key));
       }
@@ -844,16 +738,14 @@ public class Props {
 
   /**
    * Returns a set of all keys, including the parents
-   *
-   * @return
    */
   public Set<String> getKeySet() {
-    HashSet<String> keySet = new HashSet<String>();
+    final HashSet<String> keySet = new HashSet<>();
 
     keySet.addAll(localKeySet());
 
-    if (_parent != null) {
-      keySet.addAll(_parent.getKeySet());
+    if (this._parent != null) {
+      keySet.addAll(this._parent.getKeySet());
     }
 
     return keySet;
@@ -861,50 +753,19 @@ public class Props {
 
   /**
    * Logs the property in the given logger
-   *
-   * @param logger
-   * @param comment
    */
-  public void logProperties(Logger logger, String comment) {
+  public void logProperties(final Logger logger, final String comment) {
     logger.info(comment);
 
-    for (String key : getKeySet()) {
+    for (final String key : getKeySet()) {
       logger.info("  key=" + key + " value=" + get(key));
     }
   }
 
   /**
-   * Clones the Props p object and all of its parents.
-   *
-   * @param p
-   * @return
-   */
-  public static Props clone(Props p) {
-    return copyNext(p);
-  }
-
-  /**
-   *
-   * @param source
-   * @return
    */
-  private static Props copyNext(Props source) {
-    Props priorNodeCopy = null;
-    if (source.getParent() != null) {
-      priorNodeCopy = copyNext(source.getParent());
-    }
-    Props dest = new Props(priorNodeCopy);
-    for (String key : source.localKeySet()) {
-      dest.put(key, source.get(key));
-    }
-
-    return dest;
-  }
-
-  /**
-     */
   @Override
-  public boolean equals(Object o) {
+  public boolean equals(final Object o) {
     if (o == this) {
       return true;
     } else if (o == null) {
@@ -913,23 +774,20 @@ public class Props {
       return false;
     }
 
-    Props p = (Props) o;
-    return _current.equals(p._current) && Utils.equals(this._parent, p._parent);
+    final Props p = (Props) o;
+    return this._current.equals(p._current) && Utils.equals(this._parent, p._parent);
   }
 
   /**
    * Returns true if the properties are equivalent, regardless of the hierarchy.
-   *
-   * @param p
-   * @return
    */
-  public boolean equalsProps(Props p) {
+  public boolean equalsProps(final Props p) {
     if (p == null) {
       return false;
     }
 
     final Set<String> myKeySet = getKeySet();
-    for (String s : myKeySet) {
+    for (final String s : myKeySet) {
       if (!get(s).equals(p.get(s))) {
         return false;
       }
@@ -939,45 +797,42 @@ public class Props {
   }
 
   /**
-     *
-     */
+   *
+   */
   @Override
   public int hashCode() {
     int code = this._current.hashCode();
-    if (_parent != null)
-      code += _parent.hashCode();
+    if (this._parent != null) {
+      code += this._parent.hashCode();
+    }
     return code;
   }
 
   /**
-     *
-     */
+   *
+   */
   @Override
   public String toString() {
-    StringBuilder builder = new StringBuilder("{");
-    for (Map.Entry<String, String> entry : this._current.entrySet()) {
+    final StringBuilder builder = new StringBuilder("{");
+    for (final Map.Entry<String, String> entry : this._current.entrySet()) {
       builder.append(entry.getKey());
       builder.append(": ");
       builder.append(entry.getValue());
       builder.append(", ");
     }
-    if (_parent != null) {
+    if (this._parent != null) {
       builder.append(" parent = ");
-      builder.append(_parent.toString());
+      builder.append(this._parent.toString());
     }
     builder.append("}");
     return builder.toString();
   }
 
   public String getSource() {
-    return source;
+    return this.source;
   }
 
-  public void setSource(String source) {
+  public void setSource(final String source) {
     this.source = source;
   }
-
-  public void setParent(Props prop) {
-    this._parent = prop;
-  }
 }
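
Props is a layered key/value store: get() checks the local map first and then walks up the parent chain, while localKeySet()/getKeySet() and getMapByPrefix() expose the local and merged views. A short usage sketch of that hierarchy, assuming azkaban-common is on the classpath; the keys and values are illustrative:

// Usage sketch for the parent-chain lookup described above; PropsDemo is not part of Azkaban.
import azkaban.utils.Props;

public class PropsDemo {

  public static void main(final String[] args) {
    final Props defaults = Props.of("db.host", "localhost", "db.port", "3306");

    // Child Props with 'defaults' as parent; local values shadow the parent on lookup.
    final Props job = new Props(defaults);
    job.put("db.host", "db.example.com");

    System.out.println(job.get("db.host"));  // db.example.com (local value wins)
    System.out.println(job.get("db.port"));  // 3306 (falls back to the parent)
    System.out.println(job.localKeySet());   // [db.host] -- local keys only
    System.out.println(job.getKeySet());     // local plus parent keys

    // getMapByPrefix strips the prefix and merges the chain, child entries taking priority.
    System.out.println(job.getMapByPrefix("db."));  // {host=db.example.com, port=3306} (HashMap, order may vary)
  }
}
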
diff --git a/azkaban-common/src/main/java/azkaban/utils/PropsUtils.java b/azkaban-common/src/main/java/azkaban/utils/PropsUtils.java
index 3407dab..449d432 100644
--- a/azkaban-common/src/main/java/azkaban/utils/PropsUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/PropsUtils.java
@@ -16,6 +16,10 @@
 
 package azkaban.utils;
 
+import azkaban.executor.ExecutableFlowBase;
+import azkaban.flow.CommonJobProperties;
+import com.google.common.collect.MapDifference;
+import com.google.common.collect.Maps;
 import java.io.File;
 import java.io.IOException;
 import java.util.Arrays;
@@ -27,25 +31,20 @@ import java.util.Set;
 import java.util.UUID;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-
 import org.apache.commons.jexl2.Expression;
 import org.apache.commons.jexl2.JexlEngine;
 import org.apache.commons.jexl2.JexlException;
 import org.apache.commons.jexl2.MapContext;
 import org.apache.commons.lang.StringUtils;
-
 import org.apache.log4j.Logger;
 import org.joda.time.DateTime;
 
-import com.google.common.collect.Maps;
-import com.google.common.collect.MapDifference;
-
-import azkaban.executor.ExecutableFlowBase;
-import azkaban.flow.CommonJobProperties;
-
 public class PropsUtils {
 
   private static final Logger logger = Logger.getLogger(PropsUtils.class);
+  private static final Pattern VARIABLE_REPLACEMENT_PATTERN = Pattern
+      .compile("\\$\\{([a-zA-Z_.0-9]+)\\}");
+
   /**
    * Load job schedules from the given directories.
    * @param dir The directory to look in
@@ -53,7 +52,7 @@ public class PropsUtils {
    * @param suffixes File suffixes to load
    * @return The loaded set of schedules
    */
-  public static Props loadPropsInDir(File dir, String... suffixes) {
+  public static Props loadPropsInDir(final File dir, final String... suffixes) {
     return loadPropsInDir(null, dir, suffixes);
   }
 
@@ -65,35 +64,35 @@ public class PropsUtils {
    * @param suffixes File suffixes to load
    * @return The loaded set of schedules
    */
-  public static Props loadPropsInDir(Props parent, File dir, String... suffixes) {
+  public static Props loadPropsInDir(final Props parent, final File dir, final String... suffixes) {
     try {
-      Props props = new Props(parent);
-      File[] files = dir.listFiles();
+      final Props props = new Props(parent);
+      final File[] files = dir.listFiles();
       Arrays.sort(files);
       if (files != null) {
-        for (File f : files) {
+        for (final File f : files) {
           if (f.isFile() && endsWith(f, suffixes)) {
             props.putAll(new Props(null, f.getAbsolutePath()));
           }
         }
       }
       return props;
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new RuntimeException("Error loading properties.", e);
     }
   }
 
-  public static Props loadProps(Props parent, File... propFiles) {
+  public static Props loadProps(final Props parent, final File... propFiles) {
     try {
       Props props = new Props(parent);
-      for (File f : propFiles) {
+      for (final File f : propFiles) {
         if (f.isFile()) {
           props = new Props(props, f);
         }
       }
 
       return props;
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new RuntimeException("Error loading properties.", e);
     }
   }
@@ -105,9 +104,9 @@ public class PropsUtils {
    * @param suffixes The suffixes to load
    * @return The properties
    */
-  public static Props loadPropsInDirs(List<File> dirs, String... suffixes) {
-    Props props = new Props();
-    for (File dir : dirs) {
+  public static Props loadPropsInDirs(final List<File> dirs, final String... suffixes) {
+    final Props props = new Props();
+    for (final File dir : dirs) {
       props.putLocal(loadPropsInDir(dir, suffixes));
     }
     return props;
@@ -120,78 +119,79 @@ public class PropsUtils {
    * @param props The parent properties for loaded properties
    * @param suffixes The suffixes of files to load
    */
-  public static void loadPropsBySuffix(File jobPath, Props props,
-      String... suffixes) {
+  public static void loadPropsBySuffix(final File jobPath, final Props props,
+      final String... suffixes) {
     try {
       if (jobPath.isDirectory()) {
-        File[] files = jobPath.listFiles();
+        final File[] files = jobPath.listFiles();
         if (files != null) {
-          for (File file : files)
+          for (final File file : files) {
             loadPropsBySuffix(file, props, suffixes);
+          }
         }
       } else if (endsWith(jobPath, suffixes)) {
         props.putAll(new Props(null, jobPath.getAbsolutePath()));
       }
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new RuntimeException("Error loading schedule properties.", e);
     }
   }
 
-  public static boolean endsWith(File file, String... suffixes) {
-    for (String suffix : suffixes)
-      if (file.getName().endsWith(suffix))
+  public static boolean endsWith(final File file, final String... suffixes) {
+    for (final String suffix : suffixes) {
+      if (file.getName().endsWith(suffix)) {
         return true;
+      }
+    }
     return false;
   }
 
-  private static final Pattern VARIABLE_REPLACEMENT_PATTERN = Pattern
-      .compile("\\$\\{([a-zA-Z_.0-9]+)\\}");
-
-  public static boolean isVarialbeReplacementPattern(String str) {
-    Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(str);
+  public static boolean isVarialbeReplacementPattern(final String str) {
+    final Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(str);
     return matcher.matches();
   }
 
-  public static Props resolveProps(Props props) {
-    if (props == null)
+  public static Props resolveProps(final Props props) {
+    if (props == null) {
       return null;
+    }
 
-    Props resolvedProps = new Props();
+    final Props resolvedProps = new Props();
 
-    LinkedHashSet<String> visitedVariables = new LinkedHashSet<String>();
-    for (String key : props.getKeySet()) {
-      String value = props.get(key);
+    final LinkedHashSet<String> visitedVariables = new LinkedHashSet<>();
+    for (final String key : props.getKeySet()) {
+      final String value = props.get(key);
 
       visitedVariables.add(key);
-      String replacedValue =
+      final String replacedValue =
           resolveVariableReplacement(value, props, visitedVariables);
       visitedVariables.clear();
 
       resolvedProps.put(key, replacedValue);
     }
 
-    for (String key : resolvedProps.getKeySet()) {
-      String value = resolvedProps.get(key);
-      String expressedValue = resolveVariableExpression(value);
+    for (final String key : resolvedProps.getKeySet()) {
+      final String value = resolvedProps.get(key);
+      final String expressedValue = resolveVariableExpression(value);
       resolvedProps.put(key, expressedValue);
     }
 
     return resolvedProps;
-  };
+  }
 
-  private static String resolveVariableReplacement(String value, Props props,
-      LinkedHashSet<String> visitedVariables) {
-    StringBuffer buffer = new StringBuffer();
+  private static String resolveVariableReplacement(final String value, final Props props,
+      final LinkedHashSet<String> visitedVariables) {
+    final StringBuffer buffer = new StringBuffer();
     int startIndex = 0;
 
-    Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(value);
+    final Matcher matcher = VARIABLE_REPLACEMENT_PATTERN.matcher(value);
     while (matcher.find(startIndex)) {
       if (startIndex < matcher.start()) {
         // Copy everything up front to the buffer
         buffer.append(value.substring(startIndex, matcher.start()));
       }
 
-      String subVariable = matcher.group(1);
+      final String subVariable = matcher.group(1);
       // Detected a cycle
       if (visitedVariables.contains(subVariable)) {
         throw new IllegalArgumentException(String.format(
@@ -199,7 +199,7 @@ public class PropsUtils {
             StringUtils.join(visitedVariables, "->"), subVariable));
       } else {
         // Add substitute variable and recurse.
-        String replacement = props.get(subVariable);
+        final String replacement = props.get(subVariable);
         visitedVariables.add(subVariable);
 
         if (replacement == null) {
@@ -223,23 +223,18 @@ public class PropsUtils {
     return buffer.toString();
   }
 
-  private static String resolveVariableExpression(String value) {
-    JexlEngine jexl = new JexlEngine();
+  private static String resolveVariableExpression(final String value) {
+    final JexlEngine jexl = new JexlEngine();
     return resolveVariableExpression(value, value.length(), jexl);
   }
 
   /**
    * Function that looks for expressions to parse. It parses backwards to
    * capture embedded expressions
-   *
-   * @param value
-   * @param last
-   * @param jexl
-   * @return
    */
-  private static String resolveVariableExpression(String value, int last,
-      JexlEngine jexl) {
-    int lastIndex = value.lastIndexOf("$(", last);
+  private static String resolveVariableExpression(final String value, final int last,
+      final JexlEngine jexl) {
+    final int lastIndex = value.lastIndexOf("$(", last);
     if (lastIndex == -1) {
       return value;
     }
@@ -264,12 +259,12 @@ public class PropsUtils {
           + " not well formed.");
     }
 
-    String innerExpression = value.substring(lastIndex + 2, nextClosed);
+    final String innerExpression = value.substring(lastIndex + 2, nextClosed);
     Object result = null;
     try {
-      Expression e = jexl.createExpression(innerExpression);
+      final Expression e = jexl.createExpression(innerExpression);
       result = e.evaluate(new MapContext());
-    } catch (JexlException e) {
+    } catch (final JexlException e) {
       throw new IllegalArgumentException("Expression " + value
           + " not well formed. " + e.getMessage(), e);
     }
@@ -279,15 +274,15 @@ public class PropsUtils {
       return value;
     }
 
-    String newValue =
+    final String newValue =
         value.substring(0, lastIndex) + result.toString()
             + value.substring(nextClosed + 1);
     return resolveVariableExpression(newValue, lastIndex, jexl);
   }
 
-  public static Props addCommonFlowProperties(Props parentProps,
+  public static Props addCommonFlowProperties(final Props parentProps,
       final ExecutableFlowBase flow) {
-    Props props = new Props(parentProps);
+    final Props props = new Props(parentProps);
 
     props.put(CommonJobProperties.FLOW_ID, flow.getFlowId());
     props.put(CommonJobProperties.EXEC_ID, flow.getExecutionId());
@@ -299,7 +294,7 @@ public class PropsUtils {
     props.put(CommonJobProperties.PROJECT_LAST_CHANGED_DATE, flow.getLastModifiedTimestamp());
     props.put(CommonJobProperties.SUBMIT_USER, flow.getExecutableFlow().getSubmitUser());
 
-    DateTime loadTime = new DateTime();
+    final DateTime loadTime = new DateTime();
 
     props.put(CommonJobProperties.FLOW_START_TIMESTAMP, loadTime.toString());
     props.put(CommonJobProperties.FLOW_START_YEAR, loadTime.toString("yyyy"));
@@ -316,49 +311,48 @@ public class PropsUtils {
     return props;
   }
 
-  public static String toJSONString(Props props, boolean localOnly) {
-    Map<String, String> map = toStringMap(props, localOnly);
+  public static String toJSONString(final Props props, final boolean localOnly) {
+    final Map<String, String> map = toStringMap(props, localOnly);
     return JSONUtils.toJSON(map);
   }
 
-  public static Map<String, String> toStringMap(Props props, boolean localOnly) {
-    HashMap<String, String> map = new HashMap<String, String>();
-    Set<String> keyset = localOnly ? props.localKeySet() : props.getKeySet();
+  public static Map<String, String> toStringMap(final Props props, final boolean localOnly) {
+    final HashMap<String, String> map = new HashMap<>();
+    final Set<String> keyset = localOnly ? props.localKeySet() : props.getKeySet();
 
-    for (String key : keyset) {
-      String value = props.get(key);
+    for (final String key : keyset) {
+      final String value = props.get(key);
       map.put(key, value);
     }
 
     return map;
   }
 
-  public static Props fromJSONString(String json) throws IOException {
-    Map<String, String> obj = (Map<String, String>) JSONUtils.parseJSONFromString(json);
-    Props props = new Props(null, obj);
+  public static Props fromJSONString(final String json) throws IOException {
+    final Map<String, String> obj = (Map<String, String>) JSONUtils.parseJSONFromString(json);
+    final Props props = new Props(null, obj);
     return props;
   }
 
-  @SuppressWarnings("unchecked")
-  public static Props fromHierarchicalMap(Map<String, Object> propsMap) {
+  public static Props fromHierarchicalMap(final Map<String, Object> propsMap) {
     if (propsMap == null) {
       return null;
     }
 
-    String source = (String) propsMap.get("source");
-    Map<String, String> propsParams =
+    final String source = (String) propsMap.get("source");
+    final Map<String, String> propsParams =
         (Map<String, String>) propsMap.get("props");
 
-    Map<String, Object> parent = (Map<String, Object>) propsMap.get("parent");
-    Props parentProps = fromHierarchicalMap(parent);
+    final Map<String, Object> parent = (Map<String, Object>) propsMap.get("parent");
+    final Props parentProps = fromHierarchicalMap(parent);
 
-    Props props = new Props(parentProps, propsParams);
+    final Props props = new Props(parentProps, propsParams);
     props.setSource(source);
     return props;
   }
 
-  public static Map<String, Object> toHierarchicalMap(Props props) {
-    Map<String, Object> propsMap = new HashMap<String, Object>();
+  public static Map<String, Object> toHierarchicalMap(final Props props) {
+    final Map<String, Object> propsMap = new HashMap<>();
     propsMap.put("source", props.getSource());
     propsMap.put("props", toStringMap(props, true));
 
@@ -370,13 +364,11 @@ public class PropsUtils {
   }
 
   /**
-   * @param oldProps
-   * @param newProps
    * @return the difference between oldProps and newProps.
    */
   public static String getPropertyDiff(Props oldProps, Props newProps) {
 
-    StringBuilder builder = new StringBuilder("");
+    final StringBuilder builder = new StringBuilder("");
 
     // oldProps can not be null during the below comparison process.
     if (oldProps == null) {
@@ -387,10 +379,10 @@ public class PropsUtils {
       newProps = new Props();
     }
 
-    MapDifference<String, String> md =
+    final MapDifference<String, String> md =
         Maps.difference(toStringMap(oldProps, false), toStringMap(newProps, false));
 
-    Map<String, String> newlyCreatedProperty = md.entriesOnlyOnRight();
+    final Map<String, String> newlyCreatedProperty = md.entriesOnlyOnRight();
     if (newlyCreatedProperty != null && newlyCreatedProperty.size() > 0) {
       builder.append("Newly created Properties: ");
       newlyCreatedProperty.forEach((k, v) -> {
@@ -399,7 +391,7 @@ public class PropsUtils {
       builder.append("\n");
     }
 
-    Map<String, String> deletedProperty = md.entriesOnlyOnLeft();
+    final Map<String, String> deletedProperty = md.entriesOnlyOnLeft();
     if (deletedProperty != null && deletedProperty.size() > 0) {
       builder.append("Deleted Properties: ");
       deletedProperty.forEach((k, v) -> {
@@ -408,7 +400,7 @@ public class PropsUtils {
       builder.append("\n");
     }
 
-    Map<String, MapDifference.ValueDifference<String>> diffProperties = md.entriesDiffering();
+    final Map<String, MapDifference.ValueDifference<String>> diffProperties = md.entriesDiffering();
     if (diffProperties != null && diffProperties.size() > 0) {
       builder.append("Modified Properties: ");
       diffProperties.forEach((k, v) -> {
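
PropsUtils.resolveProps first expands ${key} references against the Props chain (throwing on cycles) and then evaluates any $(...) expressions with JEXL. A small usage sketch, assuming azkaban-common and commons-jexl are on the classpath; the keys and values are illustrative:

// Usage sketch for ${...} substitution and $(...) evaluation; PropsUtilsDemo is not part of Azkaban.
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;

public class PropsUtilsDemo {

  public static void main(final String[] args) {
    final Props props = Props.of(
        "base.dir", "/data/azkaban",
        "log.dir", "${base.dir}/logs",  // matches VARIABLE_REPLACEMENT_PATTERN
        "retries", "$(1 + 2)");         // evaluated as a JEXL expression

    final Props resolved = PropsUtils.resolveProps(props);
    System.out.println(resolved.get("log.dir"));  // /data/azkaban/logs
    System.out.println(resolved.get("retries"));  // 3
  }
}
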
diff --git a/azkaban-common/src/main/java/azkaban/utils/RestfulApiClient.java b/azkaban-common/src/main/java/azkaban/utils/RestfulApiClient.java
index ef0ef46..89242a8 100644
--- a/azkaban-common/src/main/java/azkaban/utils/RestfulApiClient.java
+++ b/azkaban-common/src/main/java/azkaban/utils/RestfulApiClient.java
@@ -21,12 +21,10 @@ import java.io.UnsupportedEncodingException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.List;
-
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpMessage;
 import org.apache.http.HttpResponse;
 import org.apache.http.NameValuePair;
-import org.apache.http.ParseException;
 import org.apache.http.client.HttpResponseException;
 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
@@ -40,130 +38,41 @@ import org.apache.http.impl.client.CloseableHttpClient;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.log4j.Logger;
 
-/** class handles the communication between the application and
- *  a Restful API based web server.
- *  @param T : type of the returning response object.
- *  Note: the idea of this abstract class is to provide a wrapper for the logic around HTTP layer communication so
- *        development work can take this as a black box and focus on processing the result.
- *        With that said the abstract class will be provided as a template, which ideally can support different types
- *        of returning object (Dictionary, xmlDoc , text etc.)
- * */
+/**
+ * This class handles the communication between the application and
+ * a RESTful API based web server.
+ *
+ * @param T : type of the returned response object. Note: the idea of this abstract class is to
+ * provide a wrapper for the logic around HTTP layer communication so development work can take this
+ * as a black box and focus on processing the result. With that said, the abstract class is
+ * provided as a template, which ideally can support different types of returned objects
+ * (Dictionary, xmlDoc, text, etc.)
+ */
 public abstract class RestfulApiClient<T> {
-  protected static Logger logger = Logger.getLogger(RestfulApiClient.class);
-
-  /** Method to transform the response returned by the httpClient into the
-   *  type specified.
-   *  Note: Method need to handle case such as failed request.
-   *        Also method is not supposed to pass the response object out
-   *        via the returning value as the response will be closed after the
-   *        execution steps out of the method context.
-   * @throws HttpResponseException
-   * @throws IOException
-   * @throws ParseException
-   * **/
-  protected abstract T parseResponse(HttpResponse  response)
-      throws HttpResponseException, IOException;
-
-  /** function to perform a Get http request.
-   * @param uri   the URI of the request.
-   * @param headerEntries   extra entries to be added to request header.
-   * @return the response object type of which is specified by user.
-   * @throws IOException */
-  public T httpGet(URI uri, List<NameValuePair> headerEntries) throws IOException{
-    // shortcut if the passed url is invalid.
-    if (null == uri){
-      logger.error(" unable to perform httpGet as the passed uri is null");
-      return null;
-    }
-
-    HttpGet get = new HttpGet(uri);
-    return this.sendAndReturn((HttpGet)completeRequest(get, headerEntries));
-  }
-
-  /** function to perform a Post http request.
-   * @param uri   the URI of the request.
-   * @param headerEntries   extra entries to be added to request header.
-   * @param postingBody  the content to be posted , optional.
-   * @return the response object type of which is specified by user.
-   * @throws UnsupportedEncodingException, IOException */
-  public T httpPost(URI uri,
-      List<NameValuePair> headerEntries,
-      String postingBody) throws UnsupportedEncodingException, IOException{
-    // shortcut if the passed url is invalid.
-    if (null == uri){
-      logger.error(" unable to perform httpPost as the passed uri is null.");
-      return null;
-    }
-
-    HttpPost post = new HttpPost(uri);
-    return this.sendAndReturn(completeRequest(post,headerEntries,postingBody));
-  }
 
-  /** function to perform a Delete http request.
-   * @param uri   the URI of the request.
-   * @param headerEntries   extra entries to be added to request header.
-   * @return the response object type of which is specified by user.
-   * @throws IOException */
-  public T httpDelete(URI uri, List<NameValuePair> headerEntries) throws IOException{
-    // shortcut if the passed url is invalid.
-    if (null == uri){
-      logger.error(" unable to perform httpDelete as the passed uri is null.");
-      return null;
-    }
-
-    HttpDelete delete = new HttpDelete(uri);
-    return this.sendAndReturn((HttpDelete)completeRequest(delete, headerEntries));
-  }
-
-  /** function to perform a Put http request.
-   * @param uri   the URI of the request.
-   * @param headerEntries   extra entries to be added to request header.
-   * @param postingBody  the content to be posted , optional.
-   * @return the response object type of which is specified by user.
-   * @throws UnsupportedEncodingException, IOException */
-  public T httpPut(URI uri, List<NameValuePair> headerEntries,
-      String postingBody) throws UnsupportedEncodingException, IOException{
-    // shortcut if the passed url is invalid.
-    if (null == uri){
-      logger.error(" unable to perform httpPut as the passed url is null or empty.");
-      return null;
-    }
-
-    HttpPut put = new HttpPut(uri);
-    return this.sendAndReturn(completeRequest(put, headerEntries, postingBody));
-  }
-
-  /** function to dispatch the request and pass back the response.
-   * */
-  protected T sendAndReturn(HttpUriRequest request) throws IOException{
-    CloseableHttpClient client = HttpClients.createDefault();
-    try {
-      return this.parseResponse(client.execute(request));
-    }finally{
-      client.close();
-    }
-  }
+  protected static Logger logger = Logger.getLogger(RestfulApiClient.class);
 
-  /** helper function to build a valid URI.
-   *  @param host   host name.
-   *  @param port   host port.
-   *  @param path   extra path after host.
-   *  @param isHttp indicates if whether Http or HTTPS should be used.
-   *  @param params extra query parameters.
-   *  @return the URI built from the inputs.
-   *  @throws IOException
-   * */
-  public static URI buildUri(String host, int port, String path,
-      boolean isHttp, Pair<String, String>... params) throws IOException{
-    URIBuilder builder = new URIBuilder();
-    builder.setScheme(isHttp? "http" : "https").setHost(host).setPort(port);
-
-    if (null != path && path.length() > 0){
+  /**
+   * helper function to build a valid URI.
+   *
+   * @param host host name.
+   * @param port host port.
+   * @param path extra path after host.
+   * @param isHttp indicates whether HTTP or HTTPS should be used.
+   * @param params extra query parameters.
+   * @return the URI built from the inputs.
+   */
+  public static URI buildUri(final String host, final int port, final String path,
+      final boolean isHttp, final Pair<String, String>... params) throws IOException {
+    final URIBuilder builder = new URIBuilder();
+    builder.setScheme(isHttp ? "http" : "https").setHost(host).setPort(port);
+
+    if (null != path && path.length() > 0) {
       builder.setPath(path);
     }
 
     if (params != null) {
-      for (Pair<String, String> pair : params) {
+      for (final Pair<String, String> pair : params) {
         builder.setParameter(pair.getFirst(), pair.getSecond());
       }
     }
@@ -171,24 +80,26 @@ public abstract class RestfulApiClient<T> {
     URI uri = null;
     try {
       uri = builder.build();
-    } catch (URISyntaxException e) {
+    } catch (final URISyntaxException e) {
       throw new IOException(e);
     }
 
     return uri;
   }
 
-  /** helper function to build a valid URI.
-   *  @param uri    the URI to start with.
-   *  @param params extra query parameters to append.
-   *  @return the URI built from the inputs.
-   *  @throws IOException
-   * */
-  public static URI BuildUri(URI uri, Pair<String, String>... params) throws IOException{
-    URIBuilder builder = new URIBuilder(uri);
+  /**
+   * helper function to build a valid URI.
+   *
+   * @param uri the URI to start with.
+   * @param params extra query parameters to append.
+   * @return the URI built from the inputs.
+   */
+  public static URI BuildUri(final URI uri, final Pair<String, String>... params)
+      throws IOException {
+    final URIBuilder builder = new URIBuilder(uri);
 
     if (params != null) {
-      for (Pair<String, String> pair : params) {
+      for (final Pair<String, String> pair : params) {
         builder.setParameter(pair.getFirst(), pair.getSecond());
       }
     }
@@ -196,44 +107,149 @@ public abstract class RestfulApiClient<T> {
     URI returningUri = null;
     try {
       returningUri = builder.build();
-    } catch (URISyntaxException e) {
+    } catch (final URISyntaxException e) {
       throw new IOException(e);
     }
 
     return returningUri;
   }
 
-  /** helper function to fill  the request with header entries .
-   * */
-  private static HttpMessage completeRequest(HttpMessage request,
-      List<NameValuePair> headerEntries){
-    if (null == request){
+  /**
+   * helper function to fill the request with header entries.
+   */
+  private static HttpMessage completeRequest(final HttpMessage request,
+      final List<NameValuePair> headerEntries) {
+    if (null == request) {
       logger.error("unable to complete request as the passed request object is null");
       return request;
     }
 
     // dump all the header entries to the request.
-    if (null != headerEntries && headerEntries.size() > 0){
-      for (NameValuePair pair : headerEntries){
+    if (null != headerEntries && headerEntries.size() > 0) {
+      for (final NameValuePair pair : headerEntries) {
         request.addHeader(pair.getName(), pair.getValue());
       }
     }
     return request;
   }
 
-  /** helper function to fill  the request with header entries and posting body .
-   * */
-  private static HttpEntityEnclosingRequestBase completeRequest(HttpEntityEnclosingRequestBase request,
-      List<NameValuePair> headerEntries,
-      String postingBody) throws UnsupportedEncodingException{
-     if (null != completeRequest(request, headerEntries)){
+  /**
+   * helper function to fill the request with header entries and the posting body.
+   */
+  private static HttpEntityEnclosingRequestBase completeRequest(
+      final HttpEntityEnclosingRequestBase request,
+      final List<NameValuePair> headerEntries,
+      final String postingBody) throws UnsupportedEncodingException {
+    if (null != completeRequest(request, headerEntries)) {
       // dump the post body; UTF-8 will be used as the default encoding type.
-      if (null != postingBody && postingBody.length() > 0){
-        HttpEntity entity = new ByteArrayEntity(postingBody.getBytes("UTF-8"));
-        request.setHeader("Content-Length",  Long.toString(entity.getContentLength()));
+      if (null != postingBody && postingBody.length() > 0) {
+        final HttpEntity entity = new ByteArrayEntity(postingBody.getBytes("UTF-8"));
+        request.setHeader("Content-Length", Long.toString(entity.getContentLength()));
         request.setEntity(entity);
       }
     }
     return request;
   }
+
+  /**
+   * Method to transform the response returned by the httpClient into the
+   * type specified.
+   * Note: the method needs to handle cases such as a failed request.
+   * The method must also not pass the response object out via the return
+   * value, because the response is closed once execution leaves the method
+   * context.
+   */
+  protected abstract T parseResponse(HttpResponse response)
+      throws HttpResponseException, IOException;
+
+  /**
+   * function to perform a Get http request.
+   *
+   * @param uri the URI of the request.
+   * @param headerEntries extra entries to be added to request header.
+   * @return the response object type of which is specified by user.
+   */
+  public T httpGet(final URI uri, final List<NameValuePair> headerEntries) throws IOException {
+    // shortcut if the passed url is invalid.
+    if (null == uri) {
+      logger.error(" unable to perform httpGet as the passed uri is null");
+      return null;
+    }
+
+    final HttpGet get = new HttpGet(uri);
+    return this.sendAndReturn((HttpGet) completeRequest(get, headerEntries));
+  }
+
+  /**
+   * function to perform a Post http request.
+   *
+   * @param uri the URI of the request.
+   * @param headerEntries extra entries to be added to request header.
+   * @param postingBody the content to be posted (optional).
+   * @return the response object type of which is specified by user.
+   * @throws UnsupportedEncodingException, IOException
+   */
+  public T httpPost(final URI uri,
+      final List<NameValuePair> headerEntries,
+      final String postingBody) throws UnsupportedEncodingException, IOException {
+    // shortcut if the passed url is invalid.
+    if (null == uri) {
+      logger.error(" unable to perform httpPost as the passed uri is null.");
+      return null;
+    }
+
+    final HttpPost post = new HttpPost(uri);
+    return this.sendAndReturn(completeRequest(post, headerEntries, postingBody));
+  }
+
+  /**
+   * function to perform a Delete http request.
+   *
+   * @param uri the URI of the request.
+   * @param headerEntries extra entries to be added to request header.
+   * @return the response object type of which is specified by user.
+   */
+  public T httpDelete(final URI uri, final List<NameValuePair> headerEntries) throws IOException {
+    // shortcut if the passed url is invalid.
+    if (null == uri) {
+      logger.error(" unable to perform httpDelete as the passed uri is null.");
+      return null;
+    }
+
+    final HttpDelete delete = new HttpDelete(uri);
+    return this.sendAndReturn((HttpDelete) completeRequest(delete, headerEntries));
+  }
+
+  /**
+   * function to perform a Put http request.
+   *
+   * @param uri the URI of the request.
+   * @param headerEntries extra entries to be added to request header.
+   * @param postingBody the content to be posted (optional).
+   * @return the response object type of which is specified by user.
+   * @throws UnsupportedEncodingException, IOException
+   */
+  public T httpPut(final URI uri, final List<NameValuePair> headerEntries,
+      final String postingBody) throws UnsupportedEncodingException, IOException {
+    // shortcut if the passed url is invalid.
+    if (null == uri) {
+      logger.error(" unable to perform httpPut as the passed uri is null.");
+      return null;
+    }
+
+    final HttpPut put = new HttpPut(uri);
+    return this.sendAndReturn(completeRequest(put, headerEntries, postingBody));
+  }
+
+  /**
+   * function to dispatch the request and pass back the response.
+   */
+  protected T sendAndReturn(final HttpUriRequest request) throws IOException {
+    final CloseableHttpClient client = HttpClients.createDefault();
+    try {
+      return this.parseResponse(client.execute(request));
+    } finally {
+      client.close();
+    }
+  }
 }
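
A minimal usage sketch of the reformatted RestfulApiClient: a concrete subclass supplies parseResponse, and callers combine buildUri with one of the httpGet/httpPost/httpPut/httpDelete helpers. The subclass name, host, port, and query parameter below are illustrative assumptions, not part of this commit; the azkaban.utils locations of RestfulApiClient and Pair are assumed from the surrounding diff.

import azkaban.utils.Pair;
import azkaban.utils.RestfulApiClient;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpResponseException;
import org.apache.http.util.EntityUtils;

// Hypothetical client that treats every response body as a plain String.
public class StringApiClient extends RestfulApiClient<String> {

  // Extract what is needed here; per the Javadoc above, the HttpResponse itself
  // must not escape this method because it is closed after execution returns.
  @Override
  protected String parseResponse(final HttpResponse response)
      throws HttpResponseException, IOException {
    final int status = response.getStatusLine().getStatusCode();
    final String body = EntityUtils.toString(response.getEntity());
    if (status >= 400) {
      throw new HttpResponseException(status, body);
    }
    return body;
  }

  public static void main(final String[] args) throws IOException {
    // Builds http://localhost:8081/executor?action=ping (hypothetical endpoint).
    final URI uri = RestfulApiClient.buildUri("localhost", 8081, "/executor", true,
        new Pair<>("action", "ping"));
    final String result = new StringApiClient().httpGet(uri, Collections.emptyList());
    System.out.println(result);
  }
}
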
diff --git a/azkaban-common/src/main/java/azkaban/utils/SplitterOutputStream.java b/azkaban-common/src/main/java/azkaban/utils/SplitterOutputStream.java
index 9c228ae..f1f8ad7 100644
--- a/azkaban-common/src/main/java/azkaban/utils/SplitterOutputStream.java
+++ b/azkaban-common/src/main/java/azkaban/utils/SplitterOutputStream.java
@@ -22,32 +22,33 @@ import java.util.ArrayList;
 import java.util.List;
 
 public class SplitterOutputStream extends OutputStream {
+
   List<OutputStream> outputs;
 
-  public SplitterOutputStream(OutputStream... outputs) {
-    this.outputs = new ArrayList<OutputStream>(outputs.length);
-    for (OutputStream output : outputs) {
+  public SplitterOutputStream(final OutputStream... outputs) {
+    this.outputs = new ArrayList<>(outputs.length);
+    for (final OutputStream output : outputs) {
       this.outputs.add(output);
     }
   }
 
   @Override
-  public void write(int b) throws IOException {
-    for (OutputStream output : outputs) {
+  public void write(final int b) throws IOException {
+    for (final OutputStream output : this.outputs) {
       output.write(b);
     }
   }
 
   @Override
-  public void write(byte[] b) throws IOException {
-    for (OutputStream output : outputs) {
+  public void write(final byte[] b) throws IOException {
+    for (final OutputStream output : this.outputs) {
       output.write(b);
     }
   }
 
   @Override
-  public void write(byte[] b, int off, int len) throws IOException {
-    for (OutputStream output : outputs) {
+  public void write(final byte[] b, final int off, final int len) throws IOException {
+    for (final OutputStream output : this.outputs) {
       output.write(b, off, len);
     }
   }
@@ -55,10 +56,10 @@ public class SplitterOutputStream extends OutputStream {
   @Override
   public void flush() throws IOException {
     IOException exception = null;
-    for (OutputStream output : outputs) {
+    for (final OutputStream output : this.outputs) {
       try {
         output.flush();
-      } catch (IOException e) {
+      } catch (final IOException e) {
         exception = e;
       }
     }
@@ -70,10 +71,10 @@ public class SplitterOutputStream extends OutputStream {
   @Override
   public void close() throws IOException {
     IOException exception = null;
-    for (OutputStream output : outputs) {
+    for (final OutputStream output : this.outputs) {
       try {
         output.close();
-      } catch (IOException e) {
+      } catch (final IOException e) {
         exception = e;
       }
     }
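
Below is a small sketch of SplitterOutputStream in action: one write call is fanned out to every wrapped stream. The demo class name is hypothetical and the azkaban.utils package is assumed from the diff path.

import azkaban.utils.SplitterOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class SplitterDemo {

  public static void main(final String[] args) throws IOException {
    final ByteArrayOutputStream first = new ByteArrayOutputStream();
    final ByteArrayOutputStream second = new ByteArrayOutputStream();
    // Every write is duplicated into both targets; close() closes all of them.
    try (final SplitterOutputStream tee = new SplitterOutputStream(first, second)) {
      tee.write("hello, splitter".getBytes("UTF-8"));
      tee.flush();
    }
    System.out.println(first.toString("UTF-8") + " / " + second.toString("UTF-8"));
  }
}
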
diff --git a/azkaban-common/src/main/java/azkaban/utils/StdOutErrRedirect.java b/azkaban-common/src/main/java/azkaban/utils/StdOutErrRedirect.java
index f921267..00b9967 100644
--- a/azkaban-common/src/main/java/azkaban/utils/StdOutErrRedirect.java
+++ b/azkaban-common/src/main/java/azkaban/utils/StdOutErrRedirect.java
@@ -18,7 +18,6 @@ package azkaban.utils;
 
 import java.io.OutputStream;
 import java.io.PrintStream;
-
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 
@@ -29,6 +28,7 @@ import org.apache.log4j.Logger;
  */
 
 public class StdOutErrRedirect {
+
   private static final Logger logger = Logger.getLogger(StdOutErrRedirect.class);
   private static final PrintStream infoStream = createStream(System.out, Level.INFO);
   private static final PrintStream errorStream = createStream(System.out, Level.ERROR);
@@ -38,21 +38,22 @@ public class StdOutErrRedirect {
     System.setErr(errorStream);
   }
 
-  private static PrintStream createStream(PrintStream stream, Level level) {
+  private static PrintStream createStream(final PrintStream stream, final Level level) {
     return new LogStream(stream, level);
-  };
+  }
 
   private static class LogStream extends PrintStream {
+
     private final Level level;
 
-    public LogStream(OutputStream out, Level level) {
+    public LogStream(final OutputStream out, final Level level) {
       super(out);
       this.level = level;
     }
 
     // Underlying mechanism to log to log4j - all print methods will use this
-    private void write(String string) {
-      logger.log(level, string);
+    private void write(final String string) {
+      logger.log(this.level, string);
     }
 
     // String
@@ -69,89 +70,89 @@ public class StdOutErrRedirect {
     // Boolean
     @Override
     public void println(final boolean bool) {
-        print(bool);
+      print(bool);
     }
 
     @Override
     public void print(final boolean bool) {
-        write(String.valueOf(bool));
+      write(String.valueOf(bool));
     }
 
     // Int
     @Override
     public void println(final int i) {
-        print(i);
+      print(i);
     }
 
     @Override
     public void print(final int i) {
-        write(String.valueOf(i));
+      write(String.valueOf(i));
     }
 
     // Float
     @Override
     public void println(final float f) {
-        print(f);
+      print(f);
     }
 
     @Override
     public void print(final float f) {
-        write(String.valueOf(f));
+      write(String.valueOf(f));
     }
 
     // Char
     @Override
     public void println(final char c) {
-        print(c);
+      print(c);
     }
 
     @Override
     public void print(final char c) {
-        write(String.valueOf(c));
+      write(String.valueOf(c));
     }
 
     // Long
     @Override
     public void println(final long l) {
-        print(l);
+      print(l);
     }
 
     @Override
     public void print(final long l) {
-        write(String.valueOf(l));
+      write(String.valueOf(l));
     }
 
     // Double
     @Override
     public void println(final double d) {
-        print(d);
+      print(d);
     }
 
     @Override
     public void print(final double d) {
-        write(String.valueOf(d));
+      write(String.valueOf(d));
     }
 
     // Char []
     @Override
     public void println(final char[] c) {
-        print(c);
+      print(c);
     }
 
     @Override
     public void print(final char[] c) {
-        write(new String(c));
+      write(new String(c));
     }
 
     // Object
     @Override
     public void println(final Object o) {
-        print(o);
+      print(o);
     }
 
     @Override
     public void print(final Object o) {
-        write(o.toString());
+      write(o.toString());
     }
   }
 }
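
The LogStream pattern above can be exercised in isolation: wrap a PrintStream so that every print lands in log4j at a fixed level, then install it with System.setOut. The sketch below is a standalone illustration of that pattern (the class's own public redirect entry point is not visible in this hunk, so it is not called here); the class and logger names are hypothetical.

import java.io.OutputStream;
import java.io.PrintStream;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

public class RedirectSketch {

  private static final Logger logger = Logger.getLogger(RedirectSketch.class);

  // Same idea as LogStream: every print call is routed to log4j at one level.
  private static class LoggingStream extends PrintStream {

    private final Level level;

    LoggingStream(final OutputStream out, final Level level) {
      super(out);
      this.level = level;
    }

    @Override
    public void print(final String s) {
      logger.log(this.level, s);
    }

    @Override
    public void println(final String s) {
      print(s);
    }
  }

  public static void main(final String[] args) {
    // Configure log4j before swapping System.out so the console appender keeps the original stream.
    BasicConfigurator.configure();
    System.setOut(new LoggingStream(System.out, Level.INFO));
    System.out.println("this line is routed through log4j");
  }
}
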
diff --git a/azkaban-common/src/main/java/azkaban/utils/StringUtils.java b/azkaban-common/src/main/java/azkaban/utils/StringUtils.java
index 924afd2..bd27614 100644
--- a/azkaban-common/src/main/java/azkaban/utils/StringUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/StringUtils.java
@@ -21,11 +21,14 @@ import java.util.List;
 import java.util.regex.Pattern;
 
 public class StringUtils {
+
   public static final char SINGLE_QUOTE = '\'';
   public static final char DOUBLE_QUOTE = '\"';
+  private static final Pattern BROWSWER_PATTERN = Pattern
+      .compile(".*Gecko.*|.*AppleWebKit.*|.*Trident.*|.*Chrome.*");
 
-  public static String shellQuote(String s, char quoteCh) {
-    StringBuffer buf = new StringBuffer(s.length() + 2);
+  public static String shellQuote(final String s, final char quoteCh) {
+    final StringBuffer buf = new StringBuffer(s.length() + 2);
 
     buf.append(quoteCh);
     for (int i = 0; i < s.length(); i++) {
@@ -41,9 +44,9 @@ public class StringUtils {
   }
 
   @Deprecated
-  public static String join(List<String> list, String delimiter) {
-    StringBuffer buffer = new StringBuffer();
-    for (String str : list) {
+  public static String join(final List<String> list, final String delimiter) {
+    final StringBuffer buffer = new StringBuffer();
+    for (final String str : list) {
       buffer.append(str);
       buffer.append(delimiter);
     }
@@ -53,14 +56,10 @@ public class StringUtils {
 
   /**
    * Use this when you don't want to include Apache Common's string for plugins.
-   *
-   * @param list
-   * @param delimiter
-   * @return
    */
-  public static String join(Collection<String> list, String delimiter) {
-    StringBuffer buffer = new StringBuffer();
-    for (String str : list) {
+  public static String join(final Collection<String> list, final String delimiter) {
+    final StringBuffer buffer = new StringBuffer();
+    for (final String str : list) {
       buffer.append(str);
       buffer.append(delimiter);
     }
@@ -70,15 +69,13 @@ public class StringUtils {
 
   /**
    * Don't bother to add delimiter for last element
-   * 
-   * @param list
-   * @param delimiter
+   *
    * @return String - elements in the list separated by delimiter
    */
-  public static String join2(Collection<String> list, String delimiter) {
-    StringBuffer buffer = new StringBuffer();
+  public static String join2(final Collection<String> list, final String delimiter) {
+    final StringBuffer buffer = new StringBuffer();
     boolean first = true;
-    for (String str : list) {
+    for (final String str : list) {
       if (!first) {
         buffer.append(delimiter);
       }
@@ -90,10 +87,7 @@ public class StringUtils {
     return buffer.toString();
   }
 
-  private static final Pattern BROWSWER_PATTERN = Pattern
-      .compile(".*Gecko.*|.*AppleWebKit.*|.*Trident.*|.*Chrome.*");
-
-  public static boolean isFromBrowser(String userAgent) {
+  public static boolean isFromBrowser(final String userAgent) {
     if (userAgent == null) {
       return false;
     }
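
A quick sketch of the StringUtils helpers touched above; per the join2 Javadoc, join adds the delimiter even after the last element while join2 skips it. The demo class name is hypothetical and the azkaban.utils package is assumed.

import azkaban.utils.StringUtils;
import java.util.Arrays;
import java.util.List;

public class StringUtilsDemo {

  public static void main(final String[] args) {
    final List<String> parts = Arrays.asList("a", "b", "c");
    // join vs. join2: only join2 omits the delimiter after the last element.
    System.out.println(StringUtils.join(parts, ","));
    System.out.println(StringUtils.join2(parts, ","));
    // Wrap a string in shell quotes using the exposed quote constants.
    System.out.println(StringUtils.shellQuote("hello world", StringUtils.SINGLE_QUOTE));
    // The browser pattern matches common engine tokens such as Chrome or AppleWebKit.
    System.out.println(StringUtils.isFromBrowser("Mozilla/5.0 AppleWebKit/537.36 Chrome/58.0"));
  }
}
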
diff --git a/azkaban-common/src/main/java/azkaban/utils/SwapQueue.java b/azkaban-common/src/main/java/azkaban/utils/SwapQueue.java
index 0caae71..a13b5f3 100644
--- a/azkaban-common/src/main/java/azkaban/utils/SwapQueue.java
+++ b/azkaban-common/src/main/java/azkaban/utils/SwapQueue.java
@@ -25,12 +25,13 @@ import java.util.Iterator;
  * should be called before every read.
  */
 public class SwapQueue<T> implements Iterable<T> {
+
   ArrayList<T> primaryQueue;
   ArrayList<T> secondaryQueue;
 
   public SwapQueue() {
-    primaryQueue = new ArrayList<T>();
-    secondaryQueue = new ArrayList<T>();
+    this.primaryQueue = new ArrayList<>();
+    this.secondaryQueue = new ArrayList<>();
   }
 
   /**
@@ -38,38 +39,34 @@ public class SwapQueue<T> implements Iterable<T> {
    * released.
    */
   public synchronized void swap() {
-    primaryQueue = secondaryQueue;
-    secondaryQueue = new ArrayList<T>();
+    this.primaryQueue = this.secondaryQueue;
+    this.secondaryQueue = new ArrayList<>();
   }
 
   /**
    * Returns a count of the secondary queue.
-   *
-   * @return
    */
   public synchronized int getSwapQueueSize() {
-    return secondaryQueue.size();
+    return this.secondaryQueue.size();
   }
 
   public synchronized int getPrimarySize() {
-    return primaryQueue.size();
+    return this.primaryQueue.size();
   }
 
-  public synchronized void addAll(Collection<T> col) {
-    secondaryQueue.addAll(col);
+  public synchronized void addAll(final Collection<T> col) {
+    this.secondaryQueue.addAll(col);
   }
 
   /**
    * Returns both the secondary and primary size
-   *
-   * @return
    */
   public synchronized int getSize() {
-    return secondaryQueue.size() + primaryQueue.size();
+    return this.secondaryQueue.size() + this.primaryQueue.size();
   }
 
-  public synchronized void add(T element) {
-    secondaryQueue.add(element);
+  public synchronized void add(final T element) {
+    this.secondaryQueue.add(element);
   }
 
   /**
@@ -77,6 +74,6 @@ public class SwapQueue<T> implements Iterable<T> {
    */
   @Override
   public synchronized Iterator<T> iterator() {
-    return primaryQueue.iterator();
+    return this.primaryQueue.iterator();
   }
 }
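
A short sketch of the SwapQueue read/write cycle: producers add into the hidden secondary queue, and a swap() publishes those elements to the iterator. The demo class name is hypothetical and the azkaban.utils package is assumed.

import azkaban.utils.SwapQueue;
import java.util.Arrays;

public class SwapQueueDemo {

  public static void main(final String[] args) {
    final SwapQueue<String> queue = new SwapQueue<>();
    // add/addAll go to the secondary queue and are not yet visible to iteration.
    queue.addAll(Arrays.asList("a", "b"));
    queue.add("c");
    // swap() promotes the secondary queue to the primary one read by the iterator.
    queue.swap();
    for (final String s : queue) {
      System.out.println(s);
    }
    System.out.println("primary=" + queue.getPrimarySize()
        + ", pending=" + queue.getSwapQueueSize());
  }
}
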
diff --git a/azkaban-common/src/main/java/azkaban/utils/SystemMemoryInfo.java b/azkaban-common/src/main/java/azkaban/utils/SystemMemoryInfo.java
index af2cc4e..e71c999 100644
--- a/azkaban-common/src/main/java/azkaban/utils/SystemMemoryInfo.java
+++ b/azkaban-common/src/main/java/azkaban/utils/SystemMemoryInfo.java
@@ -15,13 +15,13 @@ import org.slf4j.LoggerFactory;
  * All the memory size used in this function is in KB.
  */
 public class SystemMemoryInfo {
-  private final OsMemoryUtil util;
 
   private static final org.slf4j.Logger logger = LoggerFactory.getLogger(SystemMemoryInfo.class);
   private static final long LOW_MEM_THRESHOLD = 3L * 1024L * 1024L; //3 GB
+  private final OsMemoryUtil util;
 
   @Inject
-  public SystemMemoryInfo(OsMemoryUtil util) {
+  public SystemMemoryInfo(final OsMemoryUtil util) {
     this.util = util;
   }
 
@@ -29,11 +29,11 @@ public class SystemMemoryInfo {
    * @param xmx Xmx for the process
    * @return true if the system can satisfy the memory request
    *
-   * Given Xmx value (in kb) used by java process, determine if system can
-   * satisfy the memory request
+   * Given the Xmx value (in KB) used by a Java process, determine if the system can satisfy the
+   * memory request.
    */
-  public boolean canSystemGrantMemory(long xmx) {
-    long  freeMemSize = util.getOsTotalFreeMemorySize();
+  public boolean canSystemGrantMemory(final long xmx) {
+    final long freeMemSize = this.util.getOsTotalFreeMemorySize();
     if (freeMemSize == 0) {
       // Fail open.
       // On the platforms that don't support the mem info file, the returned size will be 0.
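
A sketch of using SystemMemoryInfo to gate a memory request. It assumes OsMemoryUtil lives in azkaban.utils and can be constructed with a no-arg constructor (the @Inject constructor above suggests it is normally wired by Guice). Sizes follow the class's KB convention.

import azkaban.utils.OsMemoryUtil;
import azkaban.utils.SystemMemoryInfo;

public class MemoryCheckDemo {

  public static void main(final String[] args) {
    // Assumption: OsMemoryUtil has a usable no-arg constructor.
    final SystemMemoryInfo memInfo = new SystemMemoryInfo(new OsMemoryUtil());
    final long xmxKb = 4L * 1024L * 1024L; // 4 GB expressed in KB
    System.out.println("can grant 4g: " + memInfo.canSystemGrantMemory(xmxKb));
  }
}
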
diff --git a/azkaban-common/src/main/java/azkaban/utils/ThreadPoolExecutingListener.java b/azkaban-common/src/main/java/azkaban/utils/ThreadPoolExecutingListener.java
index a7c85a4..d7d377b 100644
--- a/azkaban-common/src/main/java/azkaban/utils/ThreadPoolExecutingListener.java
+++ b/azkaban-common/src/main/java/azkaban/utils/ThreadPoolExecutingListener.java
@@ -18,11 +18,11 @@ package azkaban.utils;
 /**
  * Interface for listener to get notified before and after a task has been
  * executed.
- * 
+ *
  * @author hluu
- * 
  */
 public interface ThreadPoolExecutingListener {
+
   public void beforeExecute(Runnable r);
 
   public void afterExecute(Runnable r);
diff --git a/azkaban-common/src/main/java/azkaban/utils/TrackingThreadPool.java b/azkaban-common/src/main/java/azkaban/utils/TrackingThreadPool.java
index 4d3e7fe..6fc918f 100644
--- a/azkaban-common/src/main/java/azkaban/utils/TrackingThreadPool.java
+++ b/azkaban-common/src/main/java/azkaban/utils/TrackingThreadPool.java
@@ -22,7 +22,6 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.log4j.Logger;
 
 /**
@@ -33,15 +32,14 @@ import org.apache.log4j.Logger;
  * Instrumenting applications with JMX"
  *
  * @author hluu
- *
  */
 public class TrackingThreadPool extends ThreadPoolExecutor {
 
-  private static Logger logger = Logger.getLogger(TrackingThreadPool.class);
+  private static final Logger logger = Logger.getLogger(TrackingThreadPool.class);
 
   private final Map<Runnable, Boolean> inProgress =
-      new ConcurrentHashMap<Runnable, Boolean>();
-  private final ThreadLocal<Long> startTime = new ThreadLocal<Long>();
+      new ConcurrentHashMap<>();
+  private final ThreadLocal<Long> startTime = new ThreadLocal<>();
 
   private ThreadPoolExecutingListener executingListener =
       new NoOpThreadPoolExecutingListener();
@@ -49,66 +47,66 @@ public class TrackingThreadPool extends ThreadPoolExecutor {
   private long totalTime;
   private int totalTasks;
 
-  public TrackingThreadPool(int corePoolSize, int maximumPoolSize,
-      long keepAliveTime, TimeUnit unit, BlockingQueue<Runnable> workQueue,
-      ThreadPoolExecutingListener listener) {
+  public TrackingThreadPool(final int corePoolSize, final int maximumPoolSize,
+      final long keepAliveTime, final TimeUnit unit, final BlockingQueue<Runnable> workQueue,
+      final ThreadPoolExecutingListener listener) {
     super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue);
     if (listener != null) {
-      executingListener = listener;
+      this.executingListener = listener;
     }
   }
 
   @Override
-  protected void beforeExecute(Thread t, Runnable r) {
+  protected void beforeExecute(final Thread t, final Runnable r) {
     try {
-      executingListener.beforeExecute(r);
-    } catch (Throwable e) {
+      this.executingListener.beforeExecute(r);
+    } catch (final Throwable e) {
       // to ensure the listener doesn't cause any issues
       logger.warn("Listener threw exception", e);
     }
     super.beforeExecute(t, r);
-    inProgress.put(r, Boolean.TRUE);
-    startTime.set(Long.valueOf(System.currentTimeMillis()));
+    this.inProgress.put(r, Boolean.TRUE);
+    this.startTime.set(Long.valueOf(System.currentTimeMillis()));
   }
 
   @Override
-  protected void afterExecute(Runnable r, Throwable t) {
-    long time = System.currentTimeMillis() - startTime.get().longValue();
+  protected void afterExecute(final Runnable r, final Throwable t) {
+    final long time = System.currentTimeMillis() - this.startTime.get().longValue();
     synchronized (this) {
-      totalTime += time;
-      ++totalTasks;
+      this.totalTime += time;
+      ++this.totalTasks;
     }
-    inProgress.remove(r);
+    this.inProgress.remove(r);
     super.afterExecute(r, t);
     try {
-      executingListener.afterExecute(r);
-    } catch (Throwable e) {
+      this.executingListener.afterExecute(r);
+    } catch (final Throwable e) {
       // to ensure the listener doesn't cause any issues
       logger.warn("Listener threw exception", e);
     }
   }
 
   public Set<Runnable> getInProgressTasks() {
-    return Collections.unmodifiableSet(inProgress.keySet());
+    return Collections.unmodifiableSet(this.inProgress.keySet());
   }
 
   public synchronized int getTotalTasks() {
-    return totalTasks;
+    return this.totalTasks;
   }
 
   public synchronized double getAverageTaskTime() {
-    return (totalTasks == 0) ? 0 : totalTime / totalTasks;
+    return (this.totalTasks == 0) ? 0 : this.totalTime / this.totalTasks;
   }
 
   private static class NoOpThreadPoolExecutingListener implements
       ThreadPoolExecutingListener {
 
     @Override
-    public void beforeExecute(Runnable r) {
+    public void beforeExecute(final Runnable r) {
     }
 
     @Override
-    public void afterExecute(Runnable r) {
+    public void afterExecute(final Runnable r) {
     }
   }
 }
\ No newline at end of file
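
A sketch of wiring a ThreadPoolExecutingListener into a TrackingThreadPool so the before/after hooks and the aggregate timing counters can be observed. The pool sizing and the demo class name are illustrative; the azkaban.utils package is assumed.

import azkaban.utils.ThreadPoolExecutingListener;
import azkaban.utils.TrackingThreadPool;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class TrackingPoolDemo {

  public static void main(final String[] args) throws InterruptedException {
    final ThreadPoolExecutingListener listener = new ThreadPoolExecutingListener() {
      @Override
      public void beforeExecute(final Runnable r) {
        System.out.println("about to run " + r);
      }

      @Override
      public void afterExecute(final Runnable r) {
        System.out.println("finished " + r);
      }
    };

    final TrackingThreadPool pool = new TrackingThreadPool(
        2, 2, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>(), listener);
    for (int i = 0; i < 5; i++) {
      pool.execute(() -> System.out.println("task on " + Thread.currentThread().getName()));
    }
    pool.shutdown();
    pool.awaitTermination(10, TimeUnit.SECONDS);
    System.out.println("total tasks=" + pool.getTotalTasks()
        + ", average task time=" + pool.getAverageTaskTime());
  }
}
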
diff --git a/azkaban-common/src/main/java/azkaban/utils/Triple.java b/azkaban-common/src/main/java/azkaban/utils/Triple.java
index e713f47..740304d 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Triple.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Triple.java
@@ -20,63 +20,72 @@ package azkaban.utils;
  * Like pair, but with 3 values.
  */
 public class Triple<F, S, T> {
+
   private final F first;
   private final S second;
   private final T third;
 
-  public Triple(F first, S second, T third) {
+  public Triple(final F first, final S second, final T third) {
     this.first = first;
     this.second = second;
     this.third = third;
   }
 
   public F getFirst() {
-    return first;
+    return this.first;
   }
 
   public S getSecond() {
-    return second;
+    return this.second;
   }
 
   public T getThird() {
-    return third;
+    return this.third;
   }
 
   @Override
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((first == null) ? 0 : first.hashCode());
-    result = prime * result + ((second == null) ? 0 : second.hashCode());
-    result = prime * result + ((third == null) ? 0 : third.hashCode());
+    result = prime * result + ((this.first == null) ? 0 : this.first.hashCode());
+    result = prime * result + ((this.second == null) ? 0 : this.second.hashCode());
+    result = prime * result + ((this.third == null) ? 0 : this.third.hashCode());
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (getClass() != obj.getClass())
+    }
+    if (getClass() != obj.getClass()) {
       return false;
-    @SuppressWarnings("rawtypes")
-    Triple other = (Triple) obj;
-    if (first == null) {
-      if (other.first != null)
+    }
+    final Triple other = (Triple) obj;
+    if (this.first == null) {
+      if (other.first != null) {
         return false;
-    } else if (!first.equals(other.first))
+      }
+    } else if (!this.first.equals(other.first)) {
       return false;
-    if (second == null) {
-      if (other.second != null)
+    }
+    if (this.second == null) {
+      if (other.second != null) {
         return false;
-    } else if (!second.equals(other.second))
+      }
+    } else if (!this.second.equals(other.second)) {
       return false;
-    if (third == null) {
-      if (other.third != null)
+    }
+    if (this.third == null) {
+      if (other.third != null) {
         return false;
-    } else if (!third.equals(other.third))
+      }
+    } else if (!this.third.equals(other.third)) {
       return false;
+    }
     return true;
   }
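
For reference, a tiny sketch of Triple as a value holder; equals and hashCode cover all three fields, so instances can serve as composite keys. The names below are illustrative and the azkaban.utils package is assumed.

import azkaban.utils.Triple;

public class TripleDemo {

  public static void main(final String[] args) {
    final Triple<String, Integer, Boolean> t = new Triple<>("flow", 3, true);
    System.out.println(t.getFirst() + " " + t.getSecond() + " " + t.getThird());
    // Field-by-field equality, as implemented above.
    System.out.println(t.equals(new Triple<>("flow", 3, true)));
  }
}
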
 
diff --git a/azkaban-common/src/main/java/azkaban/utils/TypedMapWrapper.java b/azkaban-common/src/main/java/azkaban/utils/TypedMapWrapper.java
index 62208f3..b7a157d 100644
--- a/azkaban-common/src/main/java/azkaban/utils/TypedMapWrapper.java
+++ b/azkaban-common/src/main/java/azkaban/utils/TypedMapWrapper.java
@@ -21,18 +21,19 @@ import java.util.List;
 import java.util.Map;
 
 public class TypedMapWrapper<K, V> {
-  private Map<K, V> map;
 
-  public TypedMapWrapper(Map<K, V> map) {
+  private final Map<K, V> map;
+
+  public TypedMapWrapper(final Map<K, V> map) {
     this.map = map;
   }
 
-  public String getString(K key) {
+  public String getString(final K key) {
     return getString(key, null);
   }
 
-  public String getString(K key, String defaultVal) {
-    Object obj = map.get(key);
+  public String getString(final K key, final String defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj == null) {
       return defaultVal;
     }
@@ -43,8 +44,8 @@ public class TypedMapWrapper<K, V> {
     return obj.toString();
   }
 
-  public Boolean getBool(K key, Boolean defaultVal) {
-    Object obj = map.get(key);
+  public Boolean getBool(final K key, final Boolean defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj == null) {
       return defaultVal;
     }
@@ -52,12 +53,12 @@ public class TypedMapWrapper<K, V> {
     return (Boolean) obj;
   }
 
-  public Integer getInt(K key) {
+  public Integer getInt(final K key) {
     return getInt(key, -1);
   }
 
-  public Integer getInt(K key, Integer defaultVal) {
-    Object obj = map.get(key);
+  public Integer getInt(final K key, final Integer defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj == null) {
       return defaultVal;
     }
@@ -70,12 +71,12 @@ public class TypedMapWrapper<K, V> {
     }
   }
 
-  public Long getLong(K key) {
+  public Long getLong(final K key) {
     return getLong(key, -1L);
   }
 
-  public Long getLong(K key, Long defaultVal) {
-    Object obj = map.get(key);
+  public Long getLong(final K key, final Long defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj == null) {
       return defaultVal;
     }
@@ -90,16 +91,14 @@ public class TypedMapWrapper<K, V> {
     }
   }
 
-  @SuppressWarnings("unchecked")
-  public Collection<String> getStringCollection(K key) {
-    Object obj = map.get(key);
+  public Collection<String> getStringCollection(final K key) {
+    final Object obj = this.map.get(key);
     return (Collection<String>) obj;
   }
 
-  @SuppressWarnings("unchecked")
-  public Collection<String> getStringCollection(K key,
-      Collection<String> defaultVal) {
-    Object obj = map.get(key);
+  public Collection<String> getStringCollection(final K key,
+      final Collection<String> defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj == null) {
       return defaultVal;
     }
@@ -107,47 +106,43 @@ public class TypedMapWrapper<K, V> {
     return (Collection<String>) obj;
   }
 
-  @SuppressWarnings("unchecked")
-  public <C> Collection<C> getCollection(K key) {
-    Object obj = map.get(key);
+  public <C> Collection<C> getCollection(final K key) {
+    final Object obj = this.map.get(key);
     if (obj instanceof Collection) {
       return (Collection<C>) obj;
     }
     return null;
   }
 
-  @SuppressWarnings("unchecked")
-  public <L> List<L> getList(K key) {
-    Object obj = map.get(key);
+  public <L> List<L> getList(final K key) {
+    final Object obj = this.map.get(key);
     if (obj instanceof List) {
       return (List<L>) obj;
     }
     return null;
   }
 
-  @SuppressWarnings("unchecked")
-  public <L> List<L> getList(K key, List<L> defaultVal) {
-    Object obj = map.get(key);
+  public <L> List<L> getList(final K key, final List<L> defaultVal) {
+    final Object obj = this.map.get(key);
     if (obj instanceof List) {
       return (List<L>) obj;
     }
     return defaultVal;
   }
 
-  public Object getObject(K key) {
-    return map.get(key);
+  public Object getObject(final K key) {
+    return this.map.get(key);
   }
 
   public Map<K, V> getMap() {
-    return map;
+    return this.map;
   }
 
-  @SuppressWarnings("unchecked")
-  public <S, T> Map<S, T> getMap(K key) {
-    return (Map<S, T>) map.get(key);
+  public <S, T> Map<S, T> getMap(final K key) {
+    return (Map<S, T>) this.map.get(key);
   }
 
-  public boolean containsKey(K key) {
-    return map.containsKey(key);
+  public boolean containsKey(final K key) {
+    return this.map.containsKey(key);
   }
 }
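
A sketch of TypedMapWrapper reading typed values out of a loosely typed map, with the documented defaults kicking in for missing keys. The demo class name is hypothetical and the azkaban.utils package is assumed.

import azkaban.utils.TypedMapWrapper;
import java.util.HashMap;
import java.util.Map;

public class TypedMapDemo {

  public static void main(final String[] args) {
    final Map<String, Object> raw = new HashMap<>();
    raw.put("name", "flow-1");
    raw.put("retries", 3);
    raw.put("failed", Boolean.TRUE);

    final TypedMapWrapper<String, Object> wrapper = new TypedMapWrapper<>(raw);
    System.out.println(wrapper.getString("name"));
    System.out.println(wrapper.getInt("retries"));
    System.out.println(wrapper.getBool("failed", false));
    // Missing key: getLong(key) falls back to the built-in -1L default.
    System.out.println(wrapper.getLong("timeout"));
  }
}
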
diff --git a/azkaban-common/src/main/java/azkaban/utils/UndefinedPropertyException.java b/azkaban-common/src/main/java/azkaban/utils/UndefinedPropertyException.java
index 239dc99..9c6f29a 100644
--- a/azkaban-common/src/main/java/azkaban/utils/UndefinedPropertyException.java
+++ b/azkaban-common/src/main/java/azkaban/utils/UndefinedPropertyException.java
@@ -23,7 +23,7 @@ public class UndefinedPropertyException extends RuntimeException {
 
   private static final long serialVersionUID = 1;
 
-  public UndefinedPropertyException(String message) {
+  public UndefinedPropertyException(final String message) {
     super(message);
   }
 
diff --git a/azkaban-common/src/main/java/azkaban/utils/Utils.java b/azkaban-common/src/main/java/azkaban/utils/Utils.java
index 48ed0ab..d024bd2 100644
--- a/azkaban-common/src/main/java/azkaban/utils/Utils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/Utils.java
@@ -27,6 +27,7 @@ import java.io.OutputStream;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.text.ParseException;
 import java.util.Collection;
 import java.util.Date;
 import java.util.Enumeration;
@@ -35,14 +36,10 @@ import java.util.TimeZone;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipFile;
 import java.util.zip.ZipOutputStream;
-import java.text.ParseException;
-
 import org.apache.commons.io.IOUtils;
-
 import org.apache.log4j.Logger;
-
-import org.joda.time.DateTimeZone;
 import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
 import org.joda.time.Days;
 import org.joda.time.DurationFieldType;
 import org.joda.time.Hours;
@@ -52,7 +49,6 @@ import org.joda.time.ReadablePeriod;
 import org.joda.time.Seconds;
 import org.joda.time.Weeks;
 import org.joda.time.Years;
-
 import org.quartz.CronExpression;
 
 /**
@@ -60,9 +56,9 @@ import org.quartz.CronExpression;
  */
 public class Utils {
 
-  private static Logger logger = Logger
-      .getLogger(Utils.class);
   public static final Random RANDOM = new Random();
+  private static final Logger logger = Logger
+      .getLogger(Utils.class);
 
   /**
    * Private constructor.
@@ -73,12 +69,8 @@ public class Utils {
   /**
    * Equivalent to Object.equals except that it handles nulls. If a and b are
    * both null, true is returned.
-   *
-   * @param a
-   * @param b
-   * @return
    */
-  public static boolean equals(Object a, Object b) {
+  public static boolean equals(final Object a, final Object b) {
     if (a == null || b == null) {
       return a == b;
     }
@@ -94,7 +86,7 @@ public class Utils {
    * @return The object if it is not null
    * @throws IllegalArgumentException if the object is null
    */
-  public static <T> T nonNull(T t) {
+  public static <T> T nonNull(final T t) {
     if (t == null) {
       throw new IllegalArgumentException("Null value not allowed.");
     } else {
@@ -102,9 +94,9 @@ public class Utils {
     }
   }
 
-  public static File findFilefromDir(File dir, String fn) {
+  public static File findFilefromDir(final File dir, final String fn) {
     if (dir.isDirectory()) {
-      for (File f : dir.listFiles()) {
+      for (final File f : dir.listFiles()) {
         if (f.getName().equals(fn)) {
           return f;
         }
@@ -119,7 +111,7 @@ public class Utils {
    * @param message The message to print
    * @param exitCode The exit code
    */
-  public static void croak(String message, int exitCode) {
+  public static void croak(final String message, final int exitCode) {
     System.err.println(message);
     System.exit(exitCode);
   }
@@ -127,10 +119,9 @@ public class Utils {
   /**
    * Tests whether a port is valid or not
    *
-   * @param port
    * @return true, if port is valid
    */
-  public static boolean isValidPort(int port) {
+  public static boolean isValidPort(final int port) {
     if (port >= 1 && port <= 65535) {
       return true;
     }
@@ -141,8 +132,8 @@ public class Utils {
     return createTempDir(new File(System.getProperty("java.io.tmpdir")));
   }
 
-  public static File createTempDir(File parent) {
-    File temp =
+  public static File createTempDir(final File parent) {
+    final File temp =
         new File(parent,
             Integer.toString(Math.abs(RANDOM.nextInt()) % 100000000));
     temp.delete();
@@ -151,9 +142,9 @@ public class Utils {
     return temp;
   }
 
-  public static void zip(File input, File output) throws IOException {
-    FileOutputStream out = new FileOutputStream(output);
-    ZipOutputStream zOut = new ZipOutputStream(out);
+  public static void zip(final File input, final File output) throws IOException {
+    final FileOutputStream out = new FileOutputStream(output);
+    final ZipOutputStream zOut = new ZipOutputStream(out);
     try {
       zipFile("", input, zOut);
     } finally {
@@ -161,14 +152,14 @@ public class Utils {
     }
   }
 
-  public static void zipFolderContent(File folder, File output)
+  public static void zipFolderContent(final File folder, final File output)
       throws IOException {
-    FileOutputStream out = new FileOutputStream(output);
-    ZipOutputStream zOut = new ZipOutputStream(out);
+    final FileOutputStream out = new FileOutputStream(output);
+    final ZipOutputStream zOut = new ZipOutputStream(out);
     try {
-      File[] files = folder.listFiles();
+      final File[] files = folder.listFiles();
       if (files != null) {
-        for (File f : files) {
+        for (final File f : files) {
           zipFile("", f, zOut);
         }
       }
@@ -177,23 +168,23 @@ public class Utils {
     }
   }
 
-  private static void zipFile(String path, File input, ZipOutputStream zOut)
+  private static void zipFile(final String path, final File input, final ZipOutputStream zOut)
       throws IOException {
     if (input.isDirectory()) {
-      File[] files = input.listFiles();
+      final File[] files = input.listFiles();
       if (files != null) {
-        for (File f : files) {
-          String childPath =
+        for (final File f : files) {
+          final String childPath =
               path + input.getName() + (f.isDirectory() ? "/" : "");
           zipFile(childPath, f, zOut);
         }
       }
     } else {
-      String childPath =
+      final String childPath =
           path + (path.length() > 0 ? "/" : "") + input.getName();
-      ZipEntry entry = new ZipEntry(childPath);
+      final ZipEntry entry = new ZipEntry(childPath);
       zOut.putNextEntry(entry);
-      InputStream fileInputStream =
+      final InputStream fileInputStream =
           new BufferedInputStream(new FileInputStream(input));
       try {
         IOUtils.copy(fileInputStream, zOut);
@@ -203,18 +194,18 @@ public class Utils {
     }
   }
 
-  public static void unzip(ZipFile source, File dest) throws IOException {
-    Enumeration<?> entries = source.entries();
+  public static void unzip(final ZipFile source, final File dest) throws IOException {
+    final Enumeration<?> entries = source.entries();
     while (entries.hasMoreElements()) {
-      ZipEntry entry = (ZipEntry) entries.nextElement();
-      File newFile = new File(dest, entry.getName());
+      final ZipEntry entry = (ZipEntry) entries.nextElement();
+      final File newFile = new File(dest, entry.getName());
       if (entry.isDirectory()) {
         newFile.mkdirs();
       } else {
         newFile.getParentFile().mkdirs();
-        InputStream src = source.getInputStream(entry);
+        final InputStream src = source.getInputStream(entry);
         try {
-          OutputStream output =
+          final OutputStream output =
               new BufferedOutputStream(new FileOutputStream(newFile));
           try {
             IOUtils.copy(src, output);
@@ -228,10 +219,10 @@ public class Utils {
     }
   }
 
-  public static String flattenToString(Collection<?> collection,
-      String delimiter) {
-    StringBuffer buffer = new StringBuffer();
-    for (Object obj : collection) {
+  public static String flattenToString(final Collection<?> collection,
+      final String delimiter) {
+    final StringBuffer buffer = new StringBuffer();
+    for (final Object obj : collection) {
       buffer.append(obj.toString());
       buffer.append(',');
     }
@@ -242,7 +233,7 @@ public class Utils {
     return buffer.toString();
   }
 
-  public static Double convertToDouble(Object obj) {
+  public static Double convertToDouble(final Object obj) {
     if (obj instanceof String) {
       return Double.parseDouble((String) obj);
     }
@@ -256,12 +247,13 @@ public class Utils {
    * @param e The Exception
    * @return The root cause of the Exception
    */
-  private static RuntimeException getCause(InvocationTargetException e) {
-    Throwable cause = e.getCause();
-    if (cause instanceof RuntimeException)
+  private static RuntimeException getCause(final InvocationTargetException e) {
+    final Throwable cause = e.getCause();
+    if (cause instanceof RuntimeException) {
       throw (RuntimeException) cause;
-    else
+    } else {
       throw new IllegalStateException(e.getCause());
+    }
   }
 
   /**
@@ -270,14 +262,15 @@ public class Utils {
    * @param args The objects to get the Classes from
    * @return The classes as an array
    */
-  public static Class<?>[] getTypes(Object... args) {
-    Class<?>[] argTypes = new Class<?>[args.length];
-    for (int i = 0; i < argTypes.length; i++)
+  public static Class<?>[] getTypes(final Object... args) {
+    final Class<?>[] argTypes = new Class<?>[args.length];
+    for (int i = 0; i < argTypes.length; i++) {
       argTypes[i] = args[i].getClass();
+    }
     return argTypes;
   }
 
-  public static Object callConstructor(Class<?> c, Object... args) {
+  public static Object callConstructor(final Class<?> c, final Object... args) {
     return callConstructor(c, getTypes(args), args);
   }
 
@@ -288,28 +281,28 @@ public class Utils {
    * @param args The arguments
    * @return The constructed object
    */
-  public static Object callConstructor(Class<?> c, Class<?>[] argTypes,
-      Object[] args) {
+  public static Object callConstructor(final Class<?> c, final Class<?>[] argTypes,
+      final Object[] args) {
     try {
-      Constructor<?> cons = c.getConstructor(argTypes);
+      final Constructor<?> cons = c.getConstructor(argTypes);
       return cons.newInstance(args);
-    } catch (InvocationTargetException e) {
+    } catch (final InvocationTargetException e) {
       throw getCause(e);
-    } catch (IllegalAccessException e) {
+    } catch (final IllegalAccessException e) {
       throw new IllegalStateException(e);
-    } catch (NoSuchMethodException e) {
+    } catch (final NoSuchMethodException e) {
       throw new IllegalStateException(e);
-    } catch (InstantiationException e) {
+    } catch (final InstantiationException e) {
       throw new IllegalStateException(e);
     }
   }
 
-  public static String formatDuration(long startTime, long endTime) {
+  public static String formatDuration(final long startTime, final long endTime) {
     if (startTime == -1) {
       return "-";
     }
 
-    long durationMS;
+    final long durationMS;
     if (endTime == -1) {
       durationMS = DateTime.now().getMillis() - startTime;
     } else {
@@ -333,76 +326,76 @@ public class Utils {
       return hours + "h " + minutes + "m " + seconds + "s";
     }
 
-    long days = hours / 24;
+    final long days = hours / 24;
     hours %= 24;
     return days + "d " + hours + "h " + minutes + "m";
   }
 
-  public static Object invokeStaticMethod(ClassLoader loader, String className,
-      String methodName, Object... args) throws ClassNotFoundException,
+  public static Object invokeStaticMethod(final ClassLoader loader, final String className,
+      final String methodName, final Object... args) throws ClassNotFoundException,
       SecurityException, NoSuchMethodException, IllegalArgumentException,
       IllegalAccessException, InvocationTargetException {
-    Class<?> clazz = loader.loadClass(className);
+    final Class<?> clazz = loader.loadClass(className);
 
-    Class<?>[] argTypes = new Class[args.length];
+    final Class<?>[] argTypes = new Class[args.length];
     for (int i = 0; i < args.length; ++i) {
       // argTypes[i] = args[i].getClass();
       argTypes[i] = args[i].getClass();
     }
 
-    Method method = clazz.getDeclaredMethod(methodName, argTypes);
+    final Method method = clazz.getDeclaredMethod(methodName, argTypes);
     return method.invoke(null, args);
   }
 
-  public static void copyStream(InputStream input, OutputStream output)
+  public static void copyStream(final InputStream input, final OutputStream output)
       throws IOException {
-    byte[] buffer = new byte[1024];
+    final byte[] buffer = new byte[1024];
     int bytesRead;
     while ((bytesRead = input.read(buffer)) != -1) {
       output.write(buffer, 0, bytesRead);
     }
   }
 
-  public static ReadablePeriod parsePeriodString(String periodStr) {
-    ReadablePeriod period;
-    char periodUnit = periodStr.charAt(periodStr.length() - 1);
+  public static ReadablePeriod parsePeriodString(final String periodStr) {
+    final ReadablePeriod period;
+    final char periodUnit = periodStr.charAt(periodStr.length() - 1);
     if (periodStr.equals("null") || periodUnit == 'n') {
       return null;
     }
 
-    int periodInt =
+    final int periodInt =
         Integer.parseInt(periodStr.substring(0, periodStr.length() - 1));
     switch (periodUnit) {
-    case 'y':
-      period = Years.years(periodInt);
-      break;
-    case 'M':
-      period = Months.months(periodInt);
-      break;
-    case 'w':
-      period = Weeks.weeks(periodInt);
-      break;
-    case 'd':
-      period = Days.days(periodInt);
-      break;
-    case 'h':
-      period = Hours.hours(periodInt);
-      break;
-    case 'm':
-      period = Minutes.minutes(periodInt);
-      break;
-    case 's':
-      period = Seconds.seconds(periodInt);
-      break;
-    default:
-      throw new IllegalArgumentException("Invalid schedule period unit '"
-          + periodUnit);
+      case 'y':
+        period = Years.years(periodInt);
+        break;
+      case 'M':
+        period = Months.months(periodInt);
+        break;
+      case 'w':
+        period = Weeks.weeks(periodInt);
+        break;
+      case 'd':
+        period = Days.days(periodInt);
+        break;
+      case 'h':
+        period = Hours.hours(periodInt);
+        break;
+      case 'm':
+        period = Minutes.minutes(periodInt);
+        break;
+      case 's':
+        period = Seconds.seconds(periodInt);
+        break;
+      default:
+        throw new IllegalArgumentException("Invalid schedule period unit '"
+            + periodUnit);
     }
 
     return period;
   }
 
-  public static String createPeriodString(ReadablePeriod period) {
+  public static String createPeriodString(final ReadablePeriod period) {
     String periodStr = "null";
 
     if (period == null) {
@@ -410,25 +403,25 @@ public class Utils {
     }
 
     if (period.get(DurationFieldType.years()) > 0) {
-      int years = period.get(DurationFieldType.years());
+      final int years = period.get(DurationFieldType.years());
       periodStr = years + "y";
     } else if (period.get(DurationFieldType.months()) > 0) {
-      int months = period.get(DurationFieldType.months());
+      final int months = period.get(DurationFieldType.months());
       periodStr = months + "M";
     } else if (period.get(DurationFieldType.weeks()) > 0) {
-      int weeks = period.get(DurationFieldType.weeks());
+      final int weeks = period.get(DurationFieldType.weeks());
       periodStr = weeks + "w";
     } else if (period.get(DurationFieldType.days()) > 0) {
-      int days = period.get(DurationFieldType.days());
+      final int days = period.get(DurationFieldType.days());
       periodStr = days + "d";
     } else if (period.get(DurationFieldType.hours()) > 0) {
-      int hours = period.get(DurationFieldType.hours());
+      final int hours = period.get(DurationFieldType.hours());
       periodStr = hours + "h";
     } else if (period.get(DurationFieldType.minutes()) > 0) {
-      int minutes = period.get(DurationFieldType.minutes());
+      final int minutes = period.get(DurationFieldType.minutes());
       periodStr = minutes + "m";
     } else if (period.get(DurationFieldType.seconds()) > 0) {
-      int seconds = period.get(DurationFieldType.seconds());
+      final int seconds = period.get(DurationFieldType.seconds());
       periodStr = seconds + "s";
     }
 
@@ -439,7 +432,7 @@ public class Utils {
    * @param strMemSize : memory string in the format such as 1G, 500M, 3000K, 5000
    * @return : long value of memory amount in kb
    */
-  public static long parseMemString(String strMemSize) {
+  public static long parseMemString(final String strMemSize) {
     if (strMemSize == null) {
       return 0L;
     }
@@ -448,7 +441,7 @@ public class Utils {
     if (strMemSize.endsWith("g") || strMemSize.endsWith("G")
         || strMemSize.endsWith("m") || strMemSize.endsWith("M")
         || strMemSize.endsWith("k") || strMemSize.endsWith("K")) {
-      String strSize = strMemSize.substring(0, strMemSize.length() - 1);
+      final String strSize = strMemSize.substring(0, strMemSize.length() - 1);
       size = Long.parseLong(strSize);
     } else {
       size = Long.parseLong(strMemSize);
@@ -469,34 +462,36 @@ public class Utils {
   }
 
   /**
-   * @param cronExpression: A cron expression is a string separated by white space, to provide a parser and evaluator for Quartz cron expressions.
+   * @param cronExpression: a whitespace-separated string in Quartz cron syntax, used to build a
+   * parser and evaluator for Quartz cron expressions.
    * @return : org.quartz.CronExpression object.
    *
    * TODO: Currently, we have to transform Joda Timezone to Java Timezone due to CronExpression.
-   *       Since Java8 enhanced Time functionalities, We consider transform all Jodatime to Java Time in future.
-   *
+   * Since Java 8 improved the time APIs, we may migrate all Joda-Time usage to Java Time in the
+   * future.
    */
-  public static CronExpression parseCronExpression(String cronExpression, DateTimeZone timezone) {
+  public static CronExpression parseCronExpression(final String cronExpression,
+      final DateTimeZone timezone) {
     if (cronExpression != null) {
       try {
-        CronExpression ce =  new CronExpression(cronExpression);
+        final CronExpression ce = new CronExpression(cronExpression);
         ce.setTimeZone(TimeZone.getTimeZone(timezone.getID()));
         return ce;
-      } catch (ParseException pe) {
+      } catch (final ParseException pe) {
         logger.error("this cron expression {" + cronExpression + "} can not be parsed. "
             + "Please Check Quartz Cron Syntax.");
       }
       return null;
-    } else return null;
+    } else {
+      return null;
+    }
   }
 
   /**
-   *
-   * @param cronExpression
-   * @param timezone
    * @return if the cronExpression is valid or not.
    */
-  public static boolean isCronExpressionValid(String cronExpression, DateTimeZone timezone) {
+  public static boolean isCronExpressionValid(final String cronExpression,
+      final DateTimeZone timezone) {
     if (!CronExpression.isValidExpression(cronExpression)) {
       return false;
     }
@@ -505,7 +500,7 @@ public class Utils {
      * The below code is aimed at checking some cases that the above code can not identify,
      * e.g. <0 0 3 ? * * 22> OR <0 0 3 ? * 8>. Under these cases, the below code is able to tell.
      */
-    CronExpression cronExecutionTime = parseCronExpression(cronExpression, timezone);
+    final CronExpression cronExecutionTime = parseCronExpression(cronExpression, timezone);
     if (cronExecutionTime == null || cronExecutionTime.getNextValidTimeAfter(new Date()) == null) {
       return false;
     }
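
A sketch touching the Utils helpers reformatted above: memory strings normalize to KB, period strings round-trip through the single-letter unit codes, and cron validation combines Quartz's syntax check with a next-fire-time probe. The demo class name is hypothetical and the azkaban.utils package is assumed.

import azkaban.utils.Utils;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadablePeriod;

public class UtilsDemo {

  public static void main(final String[] args) {
    // "2G" comes back as a KB count, per the parseMemString Javadoc.
    System.out.println(Utils.parseMemString("2G"));

    // Period strings use y, M, w, d, h, m, s as unit suffixes.
    final ReadablePeriod period = Utils.parsePeriodString("6h");
    System.out.println(Utils.createPeriodString(period));

    // A valid Quartz expression with a reachable next fire time.
    System.out.println(Utils.isCronExpressionValid("0 0 3 ? * *", DateTimeZone.UTC));
  }
}
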
diff --git a/azkaban-common/src/main/java/azkaban/utils/WebUtils.java b/azkaban-common/src/main/java/azkaban/utils/WebUtils.java
index 916e441..0fe3ae1 100644
--- a/azkaban-common/src/main/java/azkaban/utils/WebUtils.java
+++ b/azkaban-common/src/main/java/azkaban/utils/WebUtils.java
@@ -16,27 +16,24 @@
 
 package azkaban.utils;
 
+import azkaban.executor.Status;
 import java.text.NumberFormat;
 import java.util.Map;
-
 import org.joda.time.DateTime;
 import org.joda.time.DurationFieldType;
 import org.joda.time.ReadablePeriod;
 import org.joda.time.format.DateTimeFormat;
 
-import azkaban.executor.Status;
-
 public class WebUtils {
-  public static final String DATE_TIME_STRING = "YYYY-MM-dd HH:mm:ss";
 
+  public static final String DATE_TIME_STRING = "YYYY-MM-dd HH:mm:ss";
+  public static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For";
   private static final long ONE_KB = 1024;
   private static final long ONE_MB = 1024 * ONE_KB;
   private static final long ONE_GB = 1024 * ONE_MB;
   private static final long ONE_TB = 1024 * ONE_GB;
 
-  public static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For";
-
-  public String formatDate(long timeMS) {
+  public String formatDate(final long timeMS) {
     if (timeMS == -1) {
       return "-";
     }
@@ -44,12 +41,12 @@ public class WebUtils {
     return DateTimeFormat.forPattern(DATE_TIME_STRING).print(timeMS);
   }
 
-  public String formatDuration(long startTime, long endTime) {
+  public String formatDuration(final long startTime, final long endTime) {
     if (startTime == -1) {
       return "-";
     }
 
-    long durationMS;
+    final long durationMS;
     if (endTime == -1) {
       durationMS = System.currentTimeMillis() - startTime;
     } else {
@@ -73,47 +70,47 @@ public class WebUtils {
       return hours + "h " + minutes + "m " + seconds + "s";
     }
 
-    long days = hours / 24;
+    final long days = hours / 24;
     hours %= 24;
     return days + "d " + hours + "h " + minutes + "m";
   }
 
-  public String formatStatus(Status status) {
+  public String formatStatus(final Status status) {
     switch (status) {
-    case SUCCEEDED:
-      return "Success";
-    case FAILED:
-      return "Failed";
-    case RUNNING:
-      return "Running";
-    case DISABLED:
-      return "Disabled";
-    case KILLED:
-      return "Killed";
-    case FAILED_FINISHING:
-      return "Running w/Failure";
-    case PREPARING:
-      return "Preparing";
-    case READY:
-      return "Ready";
-    case PAUSED:
-      return "Paused";
-    case SKIPPED:
-      return "Skipped";
-    default:
+      case SUCCEEDED:
+        return "Success";
+      case FAILED:
+        return "Failed";
+      case RUNNING:
+        return "Running";
+      case DISABLED:
+        return "Disabled";
+      case KILLED:
+        return "Killed";
+      case FAILED_FINISHING:
+        return "Running w/Failure";
+      case PREPARING:
+        return "Preparing";
+      case READY:
+        return "Ready";
+      case PAUSED:
+        return "Paused";
+      case SKIPPED:
+        return "Skipped";
+      default:
     }
     return "Unknown";
   }
 
-  public String formatDateTime(DateTime dt) {
+  public String formatDateTime(final DateTime dt) {
     return DateTimeFormat.forPattern(DATE_TIME_STRING).print(dt);
   }
 
-  public String formatDateTime(long timestamp) {
+  public String formatDateTime(final long timestamp) {
     return formatDateTime(new DateTime(timestamp));
   }
 
-  public String formatPeriod(ReadablePeriod period) {
+  public String formatPeriod(final ReadablePeriod period) {
     String periodStr = "null";
 
     if (period == null) {
@@ -121,51 +118,52 @@ public class WebUtils {
     }
 
     if (period.get(DurationFieldType.years()) > 0) {
-      int years = period.get(DurationFieldType.years());
+      final int years = period.get(DurationFieldType.years());
       periodStr = years + " year(s)";
     } else if (period.get(DurationFieldType.months()) > 0) {
-      int months = period.get(DurationFieldType.months());
+      final int months = period.get(DurationFieldType.months());
       periodStr = months + " month(s)";
     } else if (period.get(DurationFieldType.weeks()) > 0) {
-      int weeks = period.get(DurationFieldType.weeks());
+      final int weeks = period.get(DurationFieldType.weeks());
       periodStr = weeks + " week(s)";
     } else if (period.get(DurationFieldType.days()) > 0) {
-      int days = period.get(DurationFieldType.days());
+      final int days = period.get(DurationFieldType.days());
       periodStr = days + " day(s)";
     } else if (period.get(DurationFieldType.hours()) > 0) {
-      int hours = period.get(DurationFieldType.hours());
+      final int hours = period.get(DurationFieldType.hours());
       periodStr = hours + " hour(s)";
     } else if (period.get(DurationFieldType.minutes()) > 0) {
-      int minutes = period.get(DurationFieldType.minutes());
+      final int minutes = period.get(DurationFieldType.minutes());
       periodStr = minutes + " minute(s)";
     } else if (period.get(DurationFieldType.seconds()) > 0) {
-      int seconds = period.get(DurationFieldType.seconds());
+      final int seconds = period.get(DurationFieldType.seconds());
       periodStr = seconds + " second(s)";
     }
 
     return periodStr;
   }
 
-  public String extractNumericalId(String execId) {
-    int index = execId.indexOf('.');
-    int index2 = execId.indexOf('.', index + 1);
+  public String extractNumericalId(final String execId) {
+    final int index = execId.indexOf('.');
+    final int index2 = execId.indexOf('.', index + 1);
 
     return execId.substring(0, index2);
   }
 
-  public String displayBytes(long sizeBytes) {
-    NumberFormat nf = NumberFormat.getInstance();
+  public String displayBytes(final long sizeBytes) {
+    final NumberFormat nf = NumberFormat.getInstance();
     nf.setMaximumFractionDigits(2);
-    if (sizeBytes >= ONE_TB)
+    if (sizeBytes >= ONE_TB) {
       return nf.format(sizeBytes / (double) ONE_TB) + " tb";
-    else if (sizeBytes >= ONE_GB)
+    } else if (sizeBytes >= ONE_GB) {
       return nf.format(sizeBytes / (double) ONE_GB) + " gb";
-    else if (sizeBytes >= ONE_MB)
+    } else if (sizeBytes >= ONE_MB) {
       return nf.format(sizeBytes / (double) ONE_MB) + " mb";
-    else if (sizeBytes >= ONE_KB)
+    } else if (sizeBytes >= ONE_KB) {
       return nf.format(sizeBytes / (double) ONE_KB) + " kb";
-    else
+    } else {
       return sizeBytes + " B";
+    }
   }
 
   /**
@@ -180,7 +178,8 @@ public class WebUtils {
    * @param remoteAddr The client IP address and port from the current request's TCP connection
    * @return The actual client IP address
    */
-  public String getRealClientIpAddr(Map<String, String> httpHeaders, String remoteAddr){
+  public String getRealClientIpAddr(final Map<String, String> httpHeaders,
+      final String remoteAddr) {
 
     // If some upstream device added an X-Forwarded-For header
     // use it for the client ip
@@ -189,17 +188,16 @@ public class WebUtils {
     // the session
 
     String clientIp = httpHeaders.getOrDefault(X_FORWARDED_FOR_HEADER, null);
-    if(clientIp == null){
+    if (clientIp == null) {
       clientIp = remoteAddr;
-    }
-    else{
+    } else {
       // header can contain comma separated list of upstream servers - get the first one
-      String ips[] = clientIp.split(",");
+      final String[] ips = clientIp.split(",");
       clientIp = ips[0];
     }
 
     // Strip off port and only get IP address
-    String parts[] = clientIp.split(":");
+    final String[] parts = clientIp.split(":");
     clientIp = parts[0];
 
     return clientIp;
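
Beyond the final/brace cleanup, getRealClientIpAddr keeps its original logic: prefer the first entry of a comma-separated X-Forwarded-For header, fall back to the socket address, and strip any ":port" suffix. A self-contained sketch of that logic (the class below is illustrative and not Azkaban's WebUtils):

import java.util.HashMap;
import java.util.Map;

public final class ClientIpDemo {

  static String resolveClientIp(final Map<String, String> headers, final String remoteAddr) {
    String clientIp = headers.getOrDefault("X-Forwarded-For", null);
    if (clientIp == null) {
      clientIp = remoteAddr;
    } else {
      // The header may list several upstream hops; the first entry is the original client.
      clientIp = clientIp.split(",")[0];
    }
    // Drop a trailing ":port" if present (IPv4-style addresses only, as in the original).
    return clientIp.split(":")[0];
  }

  public static void main(final String[] args) {
    final Map<String, String> forwarded = new HashMap<>();
    forwarded.put("X-Forwarded-For", "203.0.113.7:51111, 10.0.0.2");
    System.out.println(resolveClientIp(forwarded, "10.0.0.9:443"));       // 203.0.113.7
    System.out.println(resolveClientIp(new HashMap<>(), "10.0.0.9:443")); // 10.0.0.9
  }
}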
diff --git a/azkaban-common/src/test/java/azkaban/database/AzkabanConnectionPoolTest.java b/azkaban-common/src/test/java/azkaban/database/AzkabanConnectionPoolTest.java
index 6d25d6a..674371f 100644
--- a/azkaban-common/src/test/java/azkaban/database/AzkabanConnectionPoolTest.java
+++ b/azkaban-common/src/test/java/azkaban/database/AzkabanConnectionPoolTest.java
@@ -17,82 +17,80 @@
 package azkaban.database;
 
 import java.sql.Connection;
-import java.sql.SQLException;
 import org.apache.commons.dbutils.DbUtils;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
 
-public class AzkabanConnectionPoolTest{
-
-  public static class EmbeddedH2BasicDataSource extends AzkabanDataSource {
-
-    public EmbeddedH2BasicDataSource() {
-      super();
-      String url = "jdbc:h2:mem:test";
-      setDriverClassName("org.h2.Driver");
-      setUrl(url);
-    }
-
-    @Override
-    public boolean allowsOnDuplicateKey() {
-      return false;
-    }
-
-    @Override
-    public String getDBType() {
-      return "h2-in-memory";
-    }
-  }
+public class AzkabanConnectionPoolTest {
 
   AzkabanDataSource h2DataSource;
   Connection connection;
 
   @Before
-  public void setup() throws Exception{
-    h2DataSource = new EmbeddedH2BasicDataSource();
-    connection = h2DataSource.getConnection();
+  public void setup() throws Exception {
+    this.h2DataSource = new EmbeddedH2BasicDataSource();
+    this.connection = this.h2DataSource.getConnection();
   }
 
-
   @Test
   public void testConnectionDefaultAutoCommit() throws Exception {
-    Assert.assertEquals(connection.getAutoCommit(), true);
-    DbUtils.closeQuietly(connection);
+    Assert.assertEquals(this.connection.getAutoCommit(), true);
+    DbUtils.closeQuietly(this.connection);
   }
 
   @Test
   public void testConnectionDisableAutoCommit() throws Exception {
-    connection.setAutoCommit(false);
-    Assert.assertEquals(connection.getAutoCommit(), false);
-    DbUtils.closeQuietly(connection);
+    this.connection.setAutoCommit(false);
+    Assert.assertEquals(this.connection.getAutoCommit(), false);
+    DbUtils.closeQuietly(this.connection);
   }
 
   @Test
   public void testGetNewConnectionBeforeClose() throws Exception {
-    connection.setAutoCommit(false);
+    this.connection.setAutoCommit(false);
 
     /**
      * {@link AzkabanDataSource#getConnection} returns a new connection object, different from the one above, if we never close the first one.
      */
-    Assert.assertEquals(h2DataSource.getConnection().getAutoCommit(), true);
-    DbUtils.closeQuietly(connection);
+    Assert.assertEquals(this.h2DataSource.getConnection().getAutoCommit(), true);
+    DbUtils.closeQuietly(this.connection);
   }
 
   @Test
   public void testGetNewConnectionAfterClose() throws Exception {
-    connection.setAutoCommit(false);
+    this.connection.setAutoCommit(false);
 
     /**
      * See {@link org.apache.commons.dbcp2.PoolableConnectionFactory#passivateObject}.
      * If auto commit has been disabled on the connection, closing it resets the connection
      * (re-enabling auto commit) and returns it to the connection pool.
      */
-    DbUtils.closeQuietly(connection);
-    Connection newConnection = h2DataSource.getConnection();
+    DbUtils.closeQuietly(this.connection);
+    final Connection newConnection = this.h2DataSource.getConnection();
     Assert.assertEquals(newConnection.getAutoCommit(), true);
 
     DbUtils.closeQuietly(newConnection);
   }
+
+  public static class EmbeddedH2BasicDataSource extends AzkabanDataSource {
+
+    public EmbeddedH2BasicDataSource() {
+      super();
+      final String url = "jdbc:h2:mem:test";
+      setDriverClassName("org.h2.Driver");
+      setUrl(url);
+    }
+
+    @Override
+    public boolean allowsOnDuplicateKey() {
+      return false;
+    }
+
+    @Override
+    public String getDBType() {
+      return "h2-in-memory";
+    }
+  }
 }
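
The assertions in this test lean on DBCP2 resetting auto-commit when a pooled connection is returned. A minimal sketch of that behaviour, assuming commons-dbcp2 and the H2 driver are on the classpath (class name and JDBC URL below are made up):

import java.sql.Connection;
import org.apache.commons.dbcp2.BasicDataSource;

public final class PooledAutoCommitDemo {

  public static void main(final String[] args) throws Exception {
    final BasicDataSource ds = new BasicDataSource();
    ds.setDriverClassName("org.h2.Driver");
    ds.setUrl("jdbc:h2:mem:demo");

    try (final Connection first = ds.getConnection()) {
      first.setAutoCommit(false);
      System.out.println(first.getAutoCommit());   // false while checked out
    }                                              // close() returns it to the pool

    try (final Connection second = ds.getConnection()) {
      System.out.println(second.getAutoCommit());  // true again on the next checkout
    }
    ds.close();
  }
}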
diff --git a/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseSetupTest.java b/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseSetupTest.java
index a956bf0..3cc970d 100644
--- a/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseSetupTest.java
+++ b/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseSetupTest.java
@@ -16,40 +16,35 @@
 
 package azkaban.database;
 
-import com.google.common.io.Resources;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
+import azkaban.utils.Props;
+import com.google.common.io.Resources;
 import java.io.File;
 import java.io.IOException;
-import java.net.URL;
 import java.net.URISyntaxException;
+import java.net.URL;
 import java.sql.SQLException;
-
 import javax.sql.DataSource;
-
 import org.apache.commons.dbutils.QueryRunner;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
-import org.junit.Test;
 import org.junit.Rule;
+import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
-import azkaban.utils.Props;
-
 public class AzkabanDatabaseSetupTest {
-  @Rule
-  public TemporaryFolder temp = new TemporaryFolder();
 
   private static String sqlScriptsDir;
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
 
   @BeforeClass
   public static void setupDB() throws IOException, URISyntaxException {
-    URL resourceUrl = Resources.getResource("sql");
+    final URL resourceUrl = Resources.getResource("sql");
     assertNotNull(resourceUrl);
     sqlScriptsDir = new File(resourceUrl.toURI()).getCanonicalPath();
   }
@@ -58,11 +53,54 @@ public class AzkabanDatabaseSetupTest {
   public static void teardownDB() {
   }
 
-  @Ignore @Test
+  private static Props getH2Props(final String dbDir, final String sqlScriptsDir) {
+    final Props props = new Props();
+    props.put("database.type", "h2");
+    props.put("h2.path", dbDir);
+    props.put("database.sql.scripts.dir", sqlScriptsDir);
+    return props;
+  }
+
+  private static Props getMySQLProps(final String sqlScriptsDir) {
+    final Props props = new Props();
+
+    props.put("database.type", "mysql");
+    props.put("mysql.port", "3306");
+    props.put("mysql.host", "localhost");
+    props.put("mysql.database", "azkabanunittest");
+    props.put("mysql.user", "root");
+    props.put("database.sql.scripts.dir", sqlScriptsDir);
+    props.put("mysql.password", "");
+    props.put("mysql.numconnections", 10);
+
+    return props;
+  }
+
+  private static void clearMySQLTestDB() throws SQLException {
+    final Props props = new Props();
+    props.put("database.type", "mysql");
+    props.put("mysql.host", "localhost");
+    props.put("mysql.port", "3306");
+    props.put("mysql.database", "");
+    props.put("mysql.user", "root");
+    props.put("mysql.password", "");
+    props.put("mysql.numconnections", 10);
+
+    final DataSource datasource = DataSourceUtils.getDataSource(props);
+    final QueryRunner runner = new QueryRunner(datasource);
+    try {
+      runner.update("drop database azkabanunittest");
+    } catch (final SQLException e) {
+    }
+    runner.update("create database azkabanunittest");
+  }
+
+  @Ignore
+  @Test
   public void testH2Query() throws Exception {
-    File dbDir = temp.newFolder("h2dbtest");
-    Props h2Props = getH2Props(dbDir.getCanonicalPath(), sqlScriptsDir);
-    AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(h2Props);
+    final File dbDir = this.temp.newFolder("h2dbtest");
+    final Props h2Props = getH2Props(dbDir.getCanonicalPath(), sqlScriptsDir);
+    final AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(h2Props);
 
     // First time will create the tables
     setup.loadTableInfo();
@@ -83,11 +121,12 @@ public class AzkabanDatabaseSetupTest {
     assertFalse(setup.needsUpdating());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testMySQLQuery() throws Exception {
     clearMySQLTestDB();
-    Props mysqlProps = getMySQLProps(sqlScriptsDir);
-    AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(mysqlProps);
+    final Props mysqlProps = getMySQLProps(sqlScriptsDir);
+    final AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(mysqlProps);
 
     // First time will create the tables
     setup.loadTableInfo();
@@ -107,46 +146,4 @@ public class AzkabanDatabaseSetupTest {
     setup.printUpgradePlan();
     assertFalse(setup.needsUpdating());
   }
-
-  private static Props getH2Props(String dbDir, String sqlScriptsDir) {
-    Props props = new Props();
-    props.put("database.type", "h2");
-    props.put("h2.path", dbDir);
-    props.put("database.sql.scripts.dir", sqlScriptsDir);
-    return props;
-  }
-
-  private static Props getMySQLProps(String sqlScriptsDir) {
-    Props props = new Props();
-
-    props.put("database.type", "mysql");
-    props.put("mysql.port", "3306");
-    props.put("mysql.host", "localhost");
-    props.put("mysql.database", "azkabanunittest");
-    props.put("mysql.user", "root");
-    props.put("database.sql.scripts.dir", sqlScriptsDir);
-    props.put("mysql.password", "");
-    props.put("mysql.numconnections", 10);
-
-    return props;
-  }
-
-  private static void clearMySQLTestDB() throws SQLException {
-    Props props = new Props();
-    props.put("database.type", "mysql");
-    props.put("mysql.host", "localhost");
-    props.put("mysql.port", "3306");
-    props.put("mysql.database", "");
-    props.put("mysql.user", "root");
-    props.put("mysql.password", "");
-    props.put("mysql.numconnections", 10);
-
-    DataSource datasource = DataSourceUtils.getDataSource(props);
-    QueryRunner runner = new QueryRunner(datasource);
-    try {
-      runner.update("drop database azkabanunittest");
-    } catch (SQLException e) {
-    }
-    runner.update("create database azkabanunittest");
-  }
 }
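
The helpers hoisted above the tests wrap a common dbutils pattern: point a QueryRunner at a DataSource, run plain SQL, and swallow the error when the object to drop does not exist yet. A runnable sketch of that pattern against in-memory H2 (table and column names are invented):

import java.sql.SQLException;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.handlers.ScalarHandler;

public final class ResetTestTableDemo {

  public static void main(final String[] args) throws SQLException {
    final BasicDataSource ds = new BasicDataSource();
    ds.setDriverClassName("org.h2.Driver");
    ds.setUrl("jdbc:h2:mem:unittest;DB_CLOSE_DELAY=-1");

    final QueryRunner runner = new QueryRunner(ds);
    try {
      runner.update("DROP TABLE flows");
    } catch (final SQLException e) {
      // first run: nothing to drop yet
    }
    runner.update("CREATE TABLE flows (id INT PRIMARY KEY, name VARCHAR(64))");
    runner.update("INSERT INTO flows VALUES (?, ?)", 1, "embedded");

    final Long rows = runner.query("SELECT COUNT(*) FROM flows", new ScalarHandler<Long>());
    System.out.println(rows);   // 1
    ds.close();
  }
}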
diff --git a/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseUpdaterTest.java b/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseUpdaterTest.java
index cd87e5f..31a9cfb 100644
--- a/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseUpdaterTest.java
+++ b/azkaban-common/src/test/java/azkaban/database/AzkabanDatabaseUpdaterTest.java
@@ -16,89 +16,86 @@
 
 package azkaban.database;
 
-import com.google.common.io.Resources;
+import static org.junit.Assert.assertNotNull;
 
+import azkaban.utils.Props;
+import com.google.common.io.Resources;
 import java.io.File;
 import java.net.URL;
 import java.sql.SQLException;
-
 import javax.sql.DataSource;
-
 import org.apache.commons.dbutils.QueryRunner;
-
 import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.assertNotNull;
+public class AzkabanDatabaseUpdaterTest {
 
-import azkaban.utils.Props;
+  private static void clearMySQLTestDb() throws SQLException {
+    final Props props = new Props();
 
-public class AzkabanDatabaseUpdaterTest {
-  @Ignore @Test
+    props.put("database.type", "mysql");
+    props.put("mysql.host", "localhost");
+    props.put("mysql.port", "3306");
+    props.put("mysql.database", "");
+    props.put("mysql.user", "root");
+    props.put("mysql.password", "");
+    props.put("mysql.numconnections", 10);
+
+    final DataSource datasource = DataSourceUtils.getDataSource(props);
+    final QueryRunner runner = new QueryRunner(datasource);
+    try {
+      runner.update("drop database azkabanunittest");
+    } catch (final SQLException e) {
+    }
+    runner.update("create database azkabanunittest");
+  }
+
+  @Ignore
+  @Test
   public void testMySQLAutoCreate() throws Exception {
     clearMySQLTestDb();
 
-    URL resourceUrl = Resources.getResource("conf/dbtestmysql");
+    final URL resourceUrl = Resources.getResource("conf/dbtestmysql");
     assertNotNull(resourceUrl);
-    File resource = new File(resourceUrl.toURI());
-    String confDir = resource.getParent();
+    final File resource = new File(resourceUrl.toURI());
+    final String confDir = resource.getParent();
 
     System.out.println("1.***Now testing check");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
 
     System.out.println("2.***Now testing update");
-    AzkabanDatabaseUpdater.main(new String[] { "-u", "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-u", "-c", confDir});
 
     System.out.println("3.***Now testing check again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
 
     System.out.println("4.***Now testing update again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir, "-u" });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir, "-u"});
 
     System.out.println("5.***Now testing check again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
   }
 
   @Test
   public void testH2AutoCreate() throws Exception {
-    URL resourceUrl = Resources.getResource("conf/dbtesth2");
+    final URL resourceUrl = Resources.getResource("conf/dbtesth2");
     assertNotNull(resourceUrl);
-    File resource = new File(resourceUrl.toURI());
-    String confDir = resource.getParent();
+    final File resource = new File(resourceUrl.toURI());
+    final String confDir = resource.getParent();
 
     System.out.println("1.***Now testing check");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
 
     System.out.println("2.***Now testing update");
-    AzkabanDatabaseUpdater.main(new String[] { "-u", "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-u", "-c", confDir});
 
     System.out.println("3.***Now testing check again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
 
     System.out.println("4.***Now testing update again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir, "-u" });
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir, "-u"});
 
     System.out.println("5.***Now testing check again");
-    AzkabanDatabaseUpdater.main(new String[] { "-c", confDir });
-  }
-
-  private static void clearMySQLTestDb() throws SQLException {
-    Props props = new Props();
-
-    props.put("database.type", "mysql");
-    props.put("mysql.host", "localhost");
-    props.put("mysql.port", "3306");
-    props.put("mysql.database", "");
-    props.put("mysql.user", "root");
-    props.put("mysql.password", "");
-    props.put("mysql.numconnections", 10);
-
-    DataSource datasource = DataSourceUtils.getDataSource(props);
-    QueryRunner runner = new QueryRunner(datasource);
-    try {
-      runner.update("drop database azkabanunittest");
-    } catch (SQLException e) {
-    }
-    runner.update("create database azkabanunittest");
+    AzkabanDatabaseUpdater.main(new String[]{"-c", confDir});
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowPriorityComparatorTest.java b/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowPriorityComparatorTest.java
index ed6543f..80a368a 100644
--- a/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowPriorityComparatorTest.java
+++ b/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowPriorityComparatorTest.java
@@ -16,44 +16,31 @@
 
 package azkaban.executor;
 
-import java.io.File;
+import azkaban.utils.Pair;
+import azkaban.utils.TestUtils;
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.PriorityBlockingQueue;
-
 import org.junit.Assert;
 import org.junit.Test;
 
-import azkaban.alert.Alerter;
-import azkaban.flow.Flow;
-import azkaban.project.Project;
-import azkaban.user.User;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Pair;
-import azkaban.utils.Props;
-import azkaban.utils.TestUtils;
-
 /**
  * Test class for ExecutableFlowPriorityComparator
- * */
+ */
 
 public class ExecutableFlowPriorityComparatorTest {
 
   /* Helper method to create an ExecutableFlow from a serialized description */
-  private ExecutableFlow createExecutableFlow(String flowName, int priority,
-    long updateTime, int executionId) throws IOException {
-    ExecutableFlow execFlow =
-      TestUtils.createExecutableFlow("exectest1", flowName);
+  private ExecutableFlow createExecutableFlow(final String flowName, final int priority,
+      final long updateTime, final int executionId) throws IOException {
+    final ExecutableFlow execFlow =
+        TestUtils.createExecutableFlow("exectest1", flowName);
 
     execFlow.setUpdateTime(updateTime);
     execFlow.setExecutionId(executionId);
     if (priority > 0) {
       execFlow.getExecutionOptions().getFlowParameters()
-        .put(ExecutionOptions.FLOW_PRIORITY, String.valueOf(priority));
+          .put(ExecutionOptions.FLOW_PRIORITY, String.valueOf(priority));
     }
     return execFlow;
   }
@@ -61,18 +48,18 @@ public class ExecutableFlowPriorityComparatorTest {
   /* priority queue order when all priorities are explicitly specified */
   @Test
   public void testExplicitlySpecifiedPriorities() throws IOException,
-    InterruptedException {
-    ExecutableFlow flow1 = createExecutableFlow("exec1", 5, 3, 1);
-    ExecutableFlow flow2 = createExecutableFlow("exec2", 6, 3, 2);
-    ExecutableFlow flow3 = createExecutableFlow("exec3", 2, 3, 3);
-    ExecutionReference dummyRef = new ExecutionReference(0);
-
-    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
-      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
-        new ExecutableFlowPriorityComparator());
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));
+      InterruptedException {
+    final ExecutableFlow flow1 = createExecutableFlow("exec1", 5, 3, 1);
+    final ExecutableFlow flow2 = createExecutableFlow("exec2", 6, 3, 2);
+    final ExecutableFlow flow3 = createExecutableFlow("exec3", 2, 3, 3);
+    final ExecutionReference dummyRef = new ExecutionReference(0);
+
+    final BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
+        new PriorityBlockingQueue<>(10,
+            new ExecutableFlowPriorityComparator());
+    queue.put(new Pair<>(dummyRef, flow1));
+    queue.put(new Pair<>(dummyRef, flow2));
+    queue.put(new Pair<>(dummyRef, flow3));
 
     Assert.assertEquals(flow2, queue.take().getSecond());
     Assert.assertEquals(flow1, queue.take().getSecond());
@@ -82,18 +69,18 @@ public class ExecutableFlowPriorityComparatorTest {
   /* priority queue order when some priorities are implicitly specified */
   @Test
   public void testMixedSpecifiedPriorities() throws IOException,
-    InterruptedException {
-    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 3, 1);
-    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 3, 2);
-    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
-    ExecutionReference dummyRef = new ExecutionReference(0);
-
-    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
-      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
-        new ExecutableFlowPriorityComparator());
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));
+      InterruptedException {
+    final ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 3, 1);
+    final ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 3, 2);
+    final ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
+    final ExecutionReference dummyRef = new ExecutionReference(0);
+
+    final BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
+        new PriorityBlockingQueue<>(10,
+            new ExecutableFlowPriorityComparator());
+    queue.put(new Pair<>(dummyRef, flow1));
+    queue.put(new Pair<>(dummyRef, flow2));
+    queue.put(new Pair<>(dummyRef, flow3));
 
     Assert.assertEquals(flow3, queue.take().getSecond());
     Assert.assertEquals(flow1, queue.take().getSecond());
@@ -106,20 +93,20 @@ public class ExecutableFlowPriorityComparatorTest {
    */
   @Test
   public void testEqualPriorities() throws IOException, InterruptedException {
-    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
-    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
-    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
-    ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
-    ExecutionReference dummyRef = new ExecutionReference(0);
+    final ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
+    final ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
+    final ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
+    final ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
+    final ExecutionReference dummyRef = new ExecutionReference(0);
 
-    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
-      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
-        new ExecutableFlowPriorityComparator());
+    final BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
+        new PriorityBlockingQueue<>(10,
+            new ExecutableFlowPriorityComparator());
 
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow4));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));
+    queue.put(new Pair<>(dummyRef, flow4));
+    queue.put(new Pair<>(dummyRef, flow1));
+    queue.put(new Pair<>(dummyRef, flow2));
+    queue.put(new Pair<>(dummyRef, flow3));
 
     Assert.assertEquals(flow3, queue.take().getSecond());
     Assert.assertEquals(flow1, queue.take().getSecond());
@@ -133,21 +120,21 @@ public class ExecutableFlowPriorityComparatorTest {
    */
   @Test
   public void testEqualUpdateTimeAndPriority() throws IOException,
-    InterruptedException {
-    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
-    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
-    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 2, 3);
-    ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
-    ExecutionReference dummyRef = new ExecutionReference(0);
-
-    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
-      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
-        new ExecutableFlowPriorityComparator());
-
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow4));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
-    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));
+      InterruptedException {
+    final ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
+    final ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
+    final ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 2, 3);
+    final ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
+    final ExecutionReference dummyRef = new ExecutionReference(0);
+
+    final BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
+        new PriorityBlockingQueue<>(10,
+            new ExecutableFlowPriorityComparator());
+
+    queue.put(new Pair<>(dummyRef, flow4));
+    queue.put(new Pair<>(dummyRef, flow1));
+    queue.put(new Pair<>(dummyRef, flow2));
+    queue.put(new Pair<>(dummyRef, flow3));
 
     Assert.assertEquals(flow3, queue.take().getSecond());
     Assert.assertEquals(flow1, queue.take().getSecond());
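
Besides final, these hunks replace the spelled-out generic arguments with the diamond operator; the element type of the PriorityBlockingQueue is now inferred from the declaration. A small standalone illustration (the Task type and priorities below are made up):

import java.util.Comparator;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;

public final class PriorityQueueDemo {

  static final class Task {
    final String name;
    final int priority;

    Task(final String name, final int priority) {
      this.name = name;
      this.priority = priority;
    }
  }

  public static void main(final String[] args) throws InterruptedException {
    // Higher priority first; initial capacity hint of 10, as in the tests above.
    final Comparator<Task> byPriorityDesc = Comparator.comparingInt((Task t) -> -t.priority);
    final BlockingQueue<Task> queue = new PriorityBlockingQueue<>(10, byPriorityDesc);

    queue.put(new Task("low", 2));
    queue.put(new Task("high", 6));
    queue.put(new Task("mid", 5));

    System.out.println(queue.take().name);  // high
    System.out.println(queue.take().name);  // mid
    System.out.println(queue.take().name);  // low
  }
}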
diff --git a/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowTest.java b/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowTest.java
index c6400f8..b4ee1f1 100644
--- a/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowTest.java
+++ b/azkaban-common/src/test/java/azkaban/executor/ExecutableFlowTest.java
@@ -16,43 +16,207 @@
 
 package azkaban.executor;
 
-import java.io.File;
+import azkaban.executor.ExecutionOptions.FailureAction;
+import azkaban.flow.Flow;
+import azkaban.project.DirectoryFlowLoader;
+import azkaban.project.Project;
+import azkaban.test.executions.TestExecutions;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-
 import org.apache.log4j.Logger;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import azkaban.executor.ExecutionOptions.FailureAction;
-import azkaban.flow.Flow;
-import azkaban.project.DirectoryFlowLoader;
-import azkaban.project.Project;
-import azkaban.test.executions.TestExecutions;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Props;
-
 public class ExecutableFlowTest {
+
   private Project project;
 
+  private static void testEquals(final ExecutableNode a, final ExecutableNode b) {
+    if (a instanceof ExecutableFlow) {
+      if (b instanceof ExecutableFlow) {
+        final ExecutableFlow exA = (ExecutableFlow) a;
+        final ExecutableFlow exB = (ExecutableFlow) b;
+
+        Assert.assertEquals(exA.getScheduleId(), exB.getScheduleId());
+        Assert.assertEquals(exA.getProjectId(), exB.getProjectId());
+        Assert.assertEquals(exA.getVersion(), exB.getVersion());
+        Assert.assertEquals(exA.getSubmitTime(), exB.getSubmitTime());
+        Assert.assertEquals(exA.getSubmitUser(), exB.getSubmitUser());
+        Assert.assertEquals(exA.getExecutionPath(), exB.getExecutionPath());
+
+        testEquals(exA.getExecutionOptions(), exB.getExecutionOptions());
+      } else {
+        Assert.fail("A is ExecutableFlow, but B is not");
+      }
+    }
+
+    if (a instanceof ExecutableFlowBase) {
+      if (b instanceof ExecutableFlowBase) {
+        final ExecutableFlowBase exA = (ExecutableFlowBase) a;
+        final ExecutableFlowBase exB = (ExecutableFlowBase) b;
+
+        Assert.assertEquals(exA.getFlowId(), exB.getFlowId());
+        Assert.assertEquals(exA.getExecutableNodes().size(), exB
+            .getExecutableNodes().size());
+
+        for (final ExecutableNode nodeA : exA.getExecutableNodes()) {
+          final ExecutableNode nodeB = exB.getExecutableNode(nodeA.getId());
+          Assert.assertNotNull(nodeB);
+          Assert.assertEquals(a, nodeA.getParentFlow());
+          Assert.assertEquals(b, nodeB.getParentFlow());
+
+          testEquals(nodeA, nodeB);
+        }
+      } else {
+        Assert.fail("A is ExecutableFlowBase, but B is not");
+      }
+    }
+
+    Assert.assertEquals(a.getId(), b.getId());
+    Assert.assertEquals(a.getStatus(), b.getStatus());
+    Assert.assertEquals(a.getStartTime(), b.getStartTime());
+    Assert.assertEquals(a.getEndTime(), b.getEndTime());
+    Assert.assertEquals(a.getUpdateTime(), b.getUpdateTime());
+    Assert.assertEquals(a.getAttempt(), b.getAttempt());
+
+    Assert.assertEquals(a.getJobSource(), b.getJobSource());
+    Assert.assertEquals(a.getPropsSource(), b.getPropsSource());
+    Assert.assertEquals(a.getInNodes(), a.getInNodes());
+    Assert.assertEquals(a.getOutNodes(), a.getOutNodes());
+  }
+
+  private static void testEquals(final ExecutionOptions optionsA,
+      final ExecutionOptions optionsB) {
+    Assert.assertEquals(optionsA.getConcurrentOption(),
+        optionsB.getConcurrentOption());
+    Assert.assertEquals(optionsA.getNotifyOnFirstFailure(),
+        optionsB.getNotifyOnFirstFailure());
+    Assert.assertEquals(optionsA.getNotifyOnLastFailure(),
+        optionsB.getNotifyOnLastFailure());
+    Assert.assertEquals(optionsA.getFailureAction(),
+        optionsB.getFailureAction());
+    Assert.assertEquals(optionsA.getPipelineExecutionId(),
+        optionsB.getPipelineExecutionId());
+    Assert.assertEquals(optionsA.getPipelineLevel(),
+        optionsB.getPipelineLevel());
+    Assert.assertEquals(optionsA.isFailureEmailsOverridden(),
+        optionsB.isFailureEmailsOverridden());
+    Assert.assertEquals(optionsA.isSuccessEmailsOverridden(),
+        optionsB.isSuccessEmailsOverridden());
+
+    testDisabledEquals(optionsA.getDisabledJobs(), optionsB.getDisabledJobs());
+    testEquals(optionsA.getSuccessEmails(), optionsB.getSuccessEmails());
+    testEquals(optionsA.getFailureEmails(), optionsB.getFailureEmails());
+    testEquals(optionsA.getFlowParameters(), optionsB.getFlowParameters());
+  }
+
+  private static void testEquals(final Set<String> a, final Set<String> b) {
+    if (a == b) {
+      return;
+    }
+
+    if (a == null || b == null) {
+      Assert.fail();
+    }
+
+    Assert.assertEquals(a.size(), b.size());
+
+    final Iterator<String> iterA = a.iterator();
+
+    while (iterA.hasNext()) {
+      final String aStr = iterA.next();
+      Assert.assertTrue(b.contains(aStr));
+    }
+  }
+
+  private static void testEquals(final List<String> a, final List<String> b) {
+    if (a == b) {
+      return;
+    }
+
+    if (a == null || b == null) {
+      Assert.fail();
+    }
+
+    Assert.assertEquals(a.size(), b.size());
+
+    final Iterator<String> iterA = a.iterator();
+    final Iterator<String> iterB = b.iterator();
+
+    while (iterA.hasNext()) {
+      final String aStr = iterA.next();
+      final String bStr = iterB.next();
+      Assert.assertEquals(aStr, bStr);
+    }
+  }
+
+  private static void testDisabledEquals(final List<Object> a, final List<Object> b) {
+    if (a == b) {
+      return;
+    }
+
+    if (a == null || b == null) {
+      Assert.fail();
+    }
+
+    Assert.assertEquals(a.size(), b.size());
+
+    final Iterator<Object> iterA = a.iterator();
+    final Iterator<Object> iterB = b.iterator();
+
+    while (iterA.hasNext()) {
+      final Object aStr = iterA.next();
+      final Object bStr = iterB.next();
+
+      if (aStr instanceof Map && bStr instanceof Map) {
+        final Map<String, Object> aMap = (Map<String, Object>) aStr;
+        final Map<String, Object> bMap = (Map<String, Object>) bStr;
+
+        Assert.assertEquals((String) aMap.get("id"), (String) bMap.get("id"));
+        testDisabledEquals((List<Object>) aMap.get("children"),
+            (List<Object>) bMap.get("children"));
+      } else {
+        Assert.assertEquals(aStr, bStr);
+      }
+    }
+  }
+
+  private static void testEquals(final Map<String, String> a, final Map<String, String> b) {
+    if (a == b) {
+      return;
+    }
+
+    if (a == null || b == null) {
+      Assert.fail();
+    }
+
+    Assert.assertEquals(a.size(), b.size());
+
+    for (final String key : a.keySet()) {
+      Assert.assertEquals(a.get(key), b.get(key));
+    }
+  }
+
   @Before
   public void setUp() throws Exception {
-    project = new Project(11, "myTestProject");
+    this.project = new Project(11, "myTestProject");
 
-    Logger logger = Logger.getLogger(this.getClass());
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final Logger logger = Logger.getLogger(this.getClass());
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
 
-    loader.loadProjectFlow(project, TestExecutions.getFlowDir("embedded"));
+    loader.loadProjectFlow(this.project, TestExecutions.getFlowDir("embedded"));
     Assert.assertEquals(0, loader.getErrors().size());
 
-    project.setFlows(loader.getFlowMap());
-    project.setVersion(123);
+    this.project.setFlows(loader.getFlowMap());
+    this.project.setVersion(123);
   }
 
   @After
@@ -61,10 +225,10 @@ public class ExecutableFlowTest {
 
   @Test
   public void testExecutorFlowCreation() throws Exception {
-    Flow flow = project.getFlow("jobe");
+    final Flow flow = this.project.getFlow("jobe");
     Assert.assertNotNull(flow);
 
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
     Assert.assertNotNull(exFlow.getExecutableNode("joba"));
     Assert.assertNotNull(exFlow.getExecutableNode("jobb"));
     Assert.assertNotNull(exFlow.getExecutableNode("jobc"));
@@ -77,11 +241,11 @@ public class ExecutableFlowTest {
     Assert.assertTrue(exFlow.getExecutableNode("jobd") instanceof ExecutableFlowBase);
     Assert.assertFalse(exFlow.getExecutableNode("jobe") instanceof ExecutableFlowBase);
 
-    ExecutableFlowBase jobbFlow =
+    final ExecutableFlowBase jobbFlow =
         (ExecutableFlowBase) exFlow.getExecutableNode("jobb");
-    ExecutableFlowBase jobcFlow =
+    final ExecutableFlowBase jobcFlow =
         (ExecutableFlowBase) exFlow.getExecutableNode("jobc");
-    ExecutableFlowBase jobdFlow =
+    final ExecutableFlowBase jobdFlow =
         (ExecutableFlowBase) exFlow.getExecutableNode("jobd");
 
     Assert.assertEquals("innerFlow", jobbFlow.getFlowId());
@@ -99,105 +263,101 @@ public class ExecutableFlowTest {
 
   @Test
   public void testExecutorFlowJson() throws Exception {
-    Flow flow = project.getFlow("jobe");
+    final Flow flow = this.project.getFlow("jobe");
     Assert.assertNotNull(flow);
 
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
 
-    Object obj = exFlow.toObject();
-    String exFlowJSON = JSONUtils.toJSON(obj);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> flowObjMap =
+    final Object obj = exFlow.toObject();
+    final String exFlowJSON = JSONUtils.toJSON(obj);
+    final Map<String, Object> flowObjMap =
         (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
 
-    ExecutableFlow parsedExFlow =
+    final ExecutableFlow parsedExFlow =
         ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
     testEquals(exFlow, parsedExFlow);
   }
 
   @Test
   public void testExecutorFlowJson2() throws Exception {
-    Flow flow = project.getFlow("jobe");
+    final Flow flow = this.project.getFlow("jobe");
     Assert.assertNotNull(flow);
 
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
     exFlow.setExecutionId(101);
     exFlow.setAttempt(2);
     exFlow.setDelayedExecution(1000);
 
-    ExecutionOptions options = new ExecutionOptions();
+    final ExecutionOptions options = new ExecutionOptions();
     options.setConcurrentOption("blah");
-    options.setDisabledJobs(Arrays.asList(new Object[] { "bee", null, "boo" }));
+    options.setDisabledJobs(Arrays.asList(new Object[]{"bee", null, "boo"}));
     options.setFailureAction(FailureAction.CANCEL_ALL);
     options
-        .setFailureEmails(Arrays.asList(new String[] { "doo", null, "daa" }));
+        .setFailureEmails(Arrays.asList(new String[]{"doo", null, "daa"}));
     options
-        .setSuccessEmails(Arrays.asList(new String[] { "dee", null, "dae" }));
+        .setSuccessEmails(Arrays.asList(new String[]{"dee", null, "dae"}));
     options.setPipelineLevel(2);
     options.setPipelineExecutionId(3);
     options.setNotifyOnFirstFailure(true);
     options.setNotifyOnLastFailure(true);
 
-    HashMap<String, String> flowProps = new HashMap<String, String>();
+    final HashMap<String, String> flowProps = new HashMap<>();
     flowProps.put("la", "fa");
     options.addAllFlowParameters(flowProps);
     exFlow.setExecutionOptions(options);
 
-    Object obj = exFlow.toObject();
-    String exFlowJSON = JSONUtils.toJSON(obj);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> flowObjMap =
+    final Object obj = exFlow.toObject();
+    final String exFlowJSON = JSONUtils.toJSON(obj);
+    final Map<String, Object> flowObjMap =
         (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
 
-    ExecutableFlow parsedExFlow =
+    final ExecutableFlow parsedExFlow =
         ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
     testEquals(exFlow, parsedExFlow);
   }
 
-  @SuppressWarnings("rawtypes")
   @Test
   public void testExecutorFlowUpdates() throws Exception {
-    Flow flow = project.getFlow("jobe");
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
+    final Flow flow = this.project.getFlow("jobe");
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
     exFlow.setExecutionId(101);
 
     // Create copy of flow
-    Object obj = exFlow.toObject();
-    String exFlowJSON = JSONUtils.toJSON(obj);
-    @SuppressWarnings("unchecked")
-    Map<String, Object> flowObjMap =
+    final Object obj = exFlow.toObject();
+    final String exFlowJSON = JSONUtils.toJSON(obj);
+    final Map<String, Object> flowObjMap =
         (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
-    ExecutableFlow copyFlow =
+    final ExecutableFlow copyFlow =
         ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
 
     testEquals(exFlow, copyFlow);
 
-    ExecutableNode joba = exFlow.getExecutableNode("joba");
-    ExecutableFlowBase jobb =
+    final ExecutableNode joba = exFlow.getExecutableNode("joba");
+    final ExecutableFlowBase jobb =
         (ExecutableFlowBase) (exFlow.getExecutableNode("jobb"));
-    ExecutableFlowBase jobc =
+    final ExecutableFlowBase jobc =
         (ExecutableFlowBase) (exFlow.getExecutableNode("jobc"));
-    ExecutableFlowBase jobd =
+    final ExecutableFlowBase jobd =
         (ExecutableFlowBase) (exFlow.getExecutableNode("jobd"));
-    ExecutableNode jobe = exFlow.getExecutableNode("jobe");
+    final ExecutableNode jobe = exFlow.getExecutableNode("jobe");
     assertNotNull(joba, jobb, jobc, jobd, jobe);
 
-    ExecutableNode jobbInnerFlowA = jobb.getExecutableNode("innerJobA");
-    ExecutableNode jobbInnerFlowB = jobb.getExecutableNode("innerJobB");
-    ExecutableNode jobbInnerFlowC = jobb.getExecutableNode("innerJobC");
-    ExecutableNode jobbInnerFlow = jobb.getExecutableNode("innerFlow");
+    final ExecutableNode jobbInnerFlowA = jobb.getExecutableNode("innerJobA");
+    final ExecutableNode jobbInnerFlowB = jobb.getExecutableNode("innerJobB");
+    final ExecutableNode jobbInnerFlowC = jobb.getExecutableNode("innerJobC");
+    final ExecutableNode jobbInnerFlow = jobb.getExecutableNode("innerFlow");
     assertNotNull(jobbInnerFlowA, jobbInnerFlowB, jobbInnerFlowC, jobbInnerFlow);
 
-    ExecutableNode jobcInnerFlowA = jobc.getExecutableNode("innerJobA");
-    ExecutableNode jobcInnerFlowB = jobc.getExecutableNode("innerJobB");
-    ExecutableNode jobcInnerFlowC = jobc.getExecutableNode("innerJobC");
-    ExecutableNode jobcInnerFlow = jobc.getExecutableNode("innerFlow");
+    final ExecutableNode jobcInnerFlowA = jobc.getExecutableNode("innerJobA");
+    final ExecutableNode jobcInnerFlowB = jobc.getExecutableNode("innerJobB");
+    final ExecutableNode jobcInnerFlowC = jobc.getExecutableNode("innerJobC");
+    final ExecutableNode jobcInnerFlow = jobc.getExecutableNode("innerFlow");
     assertNotNull(jobcInnerFlowA, jobcInnerFlowB, jobcInnerFlowC, jobcInnerFlow);
 
-    ExecutableNode jobdInnerFlowA = jobd.getExecutableNode("innerJobA");
-    ExecutableNode jobdInnerFlowB = jobd.getExecutableNode("innerJobB");
-    ExecutableNode jobdInnerFlowC = jobd.getExecutableNode("innerJobC");
-    ExecutableNode jobdInnerFlow = jobd.getExecutableNode("innerFlow");
+    final ExecutableNode jobdInnerFlowA = jobd.getExecutableNode("innerJobA");
+    final ExecutableNode jobdInnerFlowB = jobd.getExecutableNode("innerJobB");
+    final ExecutableNode jobdInnerFlowC = jobd.getExecutableNode("innerJobC");
+    final ExecutableNode jobdInnerFlow = jobd.getExecutableNode("innerFlow");
     assertNotNull(jobdInnerFlowA, jobdInnerFlowB, jobdInnerFlowC, jobdInnerFlow);
 
     exFlow.setEndTime(1000);
@@ -206,7 +366,7 @@ public class ExecutableFlowTest {
     exFlow.setUpdateTime(133);
 
     // Change one job and see if it updates
-    long time = System.currentTimeMillis();
+    final long time = System.currentTimeMillis();
     jobe.setEndTime(time);
     jobe.setUpdateTime(time);
     jobe.setStatus(Status.DISABLED);
@@ -223,7 +383,7 @@ public class ExecutableFlowTest {
     Assert.assertNull(updateObject.get("nodes"));
 
     // Change inner flow
-    long currentTime = time + 1;
+    final long currentTime = time + 1;
     jobbInnerFlowA.setEndTime(currentTime);
     jobbInnerFlowA.setUpdateTime(currentTime);
     jobbInnerFlowA.setStatus(Status.DISABLED);
@@ -245,176 +405,9 @@ public class ExecutableFlowTest {
     Assert.assertNull(updateObject.get("nodes"));
   }
 
-  private void assertNotNull(ExecutableNode... nodes) {
-    for (ExecutableNode node : nodes) {
+  private void assertNotNull(final ExecutableNode... nodes) {
+    for (final ExecutableNode node : nodes) {
       Assert.assertNotNull(node);
     }
   }
-
-  private static void testEquals(ExecutableNode a, ExecutableNode b) {
-    if (a instanceof ExecutableFlow) {
-      if (b instanceof ExecutableFlow) {
-        ExecutableFlow exA = (ExecutableFlow) a;
-        ExecutableFlow exB = (ExecutableFlow) b;
-
-        Assert.assertEquals(exA.getScheduleId(), exB.getScheduleId());
-        Assert.assertEquals(exA.getProjectId(), exB.getProjectId());
-        Assert.assertEquals(exA.getVersion(), exB.getVersion());
-        Assert.assertEquals(exA.getSubmitTime(), exB.getSubmitTime());
-        Assert.assertEquals(exA.getSubmitUser(), exB.getSubmitUser());
-        Assert.assertEquals(exA.getExecutionPath(), exB.getExecutionPath());
-
-        testEquals(exA.getExecutionOptions(), exB.getExecutionOptions());
-      } else {
-        Assert.fail("A is ExecutableFlow, but B is not");
-      }
-    }
-
-    if (a instanceof ExecutableFlowBase) {
-      if (b instanceof ExecutableFlowBase) {
-        ExecutableFlowBase exA = (ExecutableFlowBase) a;
-        ExecutableFlowBase exB = (ExecutableFlowBase) b;
-
-        Assert.assertEquals(exA.getFlowId(), exB.getFlowId());
-        Assert.assertEquals(exA.getExecutableNodes().size(), exB
-            .getExecutableNodes().size());
-
-        for (ExecutableNode nodeA : exA.getExecutableNodes()) {
-          ExecutableNode nodeB = exB.getExecutableNode(nodeA.getId());
-          Assert.assertNotNull(nodeB);
-          Assert.assertEquals(a, nodeA.getParentFlow());
-          Assert.assertEquals(b, nodeB.getParentFlow());
-
-          testEquals(nodeA, nodeB);
-        }
-      } else {
-        Assert.fail("A is ExecutableFlowBase, but B is not");
-      }
-    }
-
-    Assert.assertEquals(a.getId(), b.getId());
-    Assert.assertEquals(a.getStatus(), b.getStatus());
-    Assert.assertEquals(a.getStartTime(), b.getStartTime());
-    Assert.assertEquals(a.getEndTime(), b.getEndTime());
-    Assert.assertEquals(a.getUpdateTime(), b.getUpdateTime());
-    Assert.assertEquals(a.getAttempt(), b.getAttempt());
-
-    Assert.assertEquals(a.getJobSource(), b.getJobSource());
-    Assert.assertEquals(a.getPropsSource(), b.getPropsSource());
-    Assert.assertEquals(a.getInNodes(), a.getInNodes());
-    Assert.assertEquals(a.getOutNodes(), a.getOutNodes());
-  }
-
-  private static void testEquals(ExecutionOptions optionsA,
-      ExecutionOptions optionsB) {
-    Assert.assertEquals(optionsA.getConcurrentOption(),
-        optionsB.getConcurrentOption());
-    Assert.assertEquals(optionsA.getNotifyOnFirstFailure(),
-        optionsB.getNotifyOnFirstFailure());
-    Assert.assertEquals(optionsA.getNotifyOnLastFailure(),
-        optionsB.getNotifyOnLastFailure());
-    Assert.assertEquals(optionsA.getFailureAction(),
-        optionsB.getFailureAction());
-    Assert.assertEquals(optionsA.getPipelineExecutionId(),
-        optionsB.getPipelineExecutionId());
-    Assert.assertEquals(optionsA.getPipelineLevel(),
-        optionsB.getPipelineLevel());
-    Assert.assertEquals(optionsA.isFailureEmailsOverridden(),
-        optionsB.isFailureEmailsOverridden());
-    Assert.assertEquals(optionsA.isSuccessEmailsOverridden(),
-        optionsB.isSuccessEmailsOverridden());
-
-    testDisabledEquals(optionsA.getDisabledJobs(), optionsB.getDisabledJobs());
-    testEquals(optionsA.getSuccessEmails(), optionsB.getSuccessEmails());
-    testEquals(optionsA.getFailureEmails(), optionsB.getFailureEmails());
-    testEquals(optionsA.getFlowParameters(), optionsB.getFlowParameters());
-  }
-
-  private static void testEquals(Set<String> a, Set<String> b) {
-    if (a == b) {
-      return;
-    }
-
-    if (a == null || b == null) {
-      Assert.fail();
-    }
-
-    Assert.assertEquals(a.size(), b.size());
-
-    Iterator<String> iterA = a.iterator();
-
-    while (iterA.hasNext()) {
-      String aStr = iterA.next();
-      Assert.assertTrue(b.contains(aStr));
-    }
-  }
-
-  private static void testEquals(List<String> a, List<String> b) {
-    if (a == b) {
-      return;
-    }
-
-    if (a == null || b == null) {
-      Assert.fail();
-    }
-
-    Assert.assertEquals(a.size(), b.size());
-
-    Iterator<String> iterA = a.iterator();
-    Iterator<String> iterB = b.iterator();
-
-    while (iterA.hasNext()) {
-      String aStr = iterA.next();
-      String bStr = iterB.next();
-      Assert.assertEquals(aStr, bStr);
-    }
-  }
-
-  @SuppressWarnings("unchecked")
-  private static void testDisabledEquals(List<Object> a, List<Object> b) {
-    if (a == b) {
-      return;
-    }
-
-    if (a == null || b == null) {
-      Assert.fail();
-    }
-
-    Assert.assertEquals(a.size(), b.size());
-
-    Iterator<Object> iterA = a.iterator();
-    Iterator<Object> iterB = b.iterator();
-
-    while (iterA.hasNext()) {
-      Object aStr = iterA.next();
-      Object bStr = iterB.next();
-
-      if (aStr instanceof Map && bStr instanceof Map) {
-        Map<String, Object> aMap = (Map<String, Object>) aStr;
-        Map<String, Object> bMap = (Map<String, Object>) bStr;
-
-        Assert.assertEquals((String) aMap.get("id"), (String) bMap.get("id"));
-        testDisabledEquals((List<Object>) aMap.get("children"),
-            (List<Object>) bMap.get("children"));
-      } else {
-        Assert.assertEquals(aStr, bStr);
-      }
-    }
-  }
-
-  private static void testEquals(Map<String, String> a, Map<String, String> b) {
-    if (a == b) {
-      return;
-    }
-
-    if (a == null || b == null) {
-      Assert.fail();
-    }
-
-    Assert.assertEquals(a.size(), b.size());
-
-    for (String key : a.keySet()) {
-      Assert.assertEquals(a.get(key), b.get(key));
-    }
-  }
 }
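
The serialization tests above all follow one pattern: convert the flow to a plain map/list structure, render it as JSON, parse it back, and compare field by field. A minimal round trip of that pattern; a plain Jackson 1.x ObjectMapper stands in for Azkaban's JSONUtils here, and the map contents are invented:

import java.util.HashMap;
import java.util.Map;
import org.codehaus.jackson.map.ObjectMapper;

public final class JsonRoundTripDemo {

  public static void main(final String[] args) throws Exception {
    final Map<String, Object> original = new HashMap<>();
    original.put("executionId", 101);
    original.put("flowId", "jobe");

    final ObjectMapper mapper = new ObjectMapper();
    final String json = mapper.writeValueAsString(original);

    @SuppressWarnings("unchecked")
    final Map<String, Object> parsed = mapper.readValue(json, Map.class);

    // Field-by-field comparison, as testEquals(...) does for real flows.
    System.out.println(original.get("flowId").equals(parsed.get("flowId")));           // true
    System.out.println(original.get("executionId").equals(parsed.get("executionId"))); // true
  }
}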
diff --git a/azkaban-common/src/test/java/azkaban/executor/InteractiveTestJob.java b/azkaban-common/src/test/java/azkaban/executor/InteractiveTestJob.java
index 082adac..0bd233d 100644
--- a/azkaban-common/src/test/java/azkaban/executor/InteractiveTestJob.java
+++ b/azkaban-common/src/test/java/azkaban/executor/InteractiveTestJob.java
@@ -16,22 +16,26 @@
 
 package azkaban.executor;
 
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.log4j.Logger;
-
 import azkaban.flow.CommonJobProperties;
 import azkaban.jobExecutor.AbstractProcessJob;
 import azkaban.utils.Props;
+import java.util.concurrent.ConcurrentHashMap;
+import org.apache.log4j.Logger;
 
 public class InteractiveTestJob extends AbstractProcessJob {
-  private static ConcurrentHashMap<String, InteractiveTestJob> testJobs =
-      new ConcurrentHashMap<String, InteractiveTestJob>();
+
+  private static final ConcurrentHashMap<String, InteractiveTestJob> testJobs =
+      new ConcurrentHashMap<>();
   private Props generatedProperties = new Props();
   private boolean isWaiting = true;
   private boolean succeed = true;
 
-  public static InteractiveTestJob getTestJob(String name) {
+  public InteractiveTestJob(final String jobId, final Props sysProps, final Props jobProps,
+      final Logger log) {
+    super(jobId, sysProps, jobProps, log);
+  }
+
+  public static InteractiveTestJob getTestJob(final String name) {
     return testJobs.get(name);
   }
 
@@ -39,31 +43,26 @@ public class InteractiveTestJob extends AbstractProcessJob {
     testJobs.clear();
   }
 
-  public InteractiveTestJob(String jobId, Props sysProps, Props jobProps,
-      Logger log) {
-    super(jobId, sysProps, jobProps, log);
-  }
-
   @Override
   public void run() throws Exception {
-    String nestedFlowPath =
+    final String nestedFlowPath =
         this.getJobProps().get(CommonJobProperties.NESTED_FLOW_PATH);
-    String groupName = this.getJobProps().getString("group", null);
+    final String groupName = this.getJobProps().getString("group", null);
     String id = nestedFlowPath == null ? this.getId() : nestedFlowPath;
     if (groupName != null) {
       id = groupName + ":" + id;
     }
     testJobs.put(id, this);
 
-    while (isWaiting) {
+    while (this.isWaiting) {
       synchronized (this) {
         try {
           wait(30000);
-        } catch (InterruptedException e) {
+        } catch (final InterruptedException e) {
         }
 
-        if (!isWaiting) {
-          if (!succeed) {
+        if (!this.isWaiting) {
+          if (!this.succeed) {
             throw new RuntimeException("Forced failure of " + getId());
           } else {
             info("Job " + getId() + " succeeded.");
@@ -75,32 +74,32 @@ public class InteractiveTestJob extends AbstractProcessJob {
 
   public void failJob() {
     synchronized (this) {
-      succeed = false;
-      isWaiting = false;
+      this.succeed = false;
+      this.isWaiting = false;
       this.notify();
     }
   }
 
   public void succeedJob() {
     synchronized (this) {
-      succeed = true;
-      isWaiting = false;
+      this.succeed = true;
+      this.isWaiting = false;
       this.notify();
     }
   }
 
-  public void succeedJob(Props generatedProperties) {
+  public void succeedJob(final Props generatedProperties) {
     synchronized (this) {
       this.generatedProperties = generatedProperties;
-      succeed = true;
-      isWaiting = false;
+      this.succeed = true;
+      this.isWaiting = false;
       this.notify();
     }
   }
 
   @Override
   public Props getJobGeneratedProperties() {
-    return generatedProperties;
+    return this.generatedProperties;
   }
 
   @Override
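
Apart from this-qualifying its fields, InteractiveTestJob keeps its wait()/notify() handshake: the job parks until the test decides whether it should succeed or fail. A stripped-down, self-contained version of that handshake (class and method names are illustrative only):

public final class WaitingJobDemo {

  private boolean waiting = true;
  private boolean succeed = true;

  public void run() throws InterruptedException {
    synchronized (this) {
      while (this.waiting) {
        wait(30_000);            // woken early by succeedJob()
      }
    }
    if (!this.succeed) {
      throw new IllegalStateException("Forced failure");
    }
    System.out.println("Job succeeded.");
  }

  public void succeedJob() {
    synchronized (this) {
      this.succeed = true;
      this.waiting = false;
      notify();
    }
  }

  public static void main(final String[] args) throws Exception {
    final WaitingJobDemo job = new WaitingJobDemo();
    final Thread runner = new Thread(() -> {
      try {
        job.run();
      } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    });
    runner.start();
    Thread.sleep(100);   // let the job reach wait()
    job.succeedJob();    // release it
    runner.join();
  }
}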
diff --git a/azkaban-common/src/test/java/azkaban/executor/JavaJob.java b/azkaban-common/src/test/java/azkaban/executor/JavaJob.java
index edba3db..fa58033 100644
--- a/azkaban-common/src/test/java/azkaban/executor/JavaJob.java
+++ b/azkaban-common/src/test/java/azkaban/executor/JavaJob.java
@@ -16,15 +16,13 @@
 
 package azkaban.executor;
 
+import azkaban.jobExecutor.JavaProcessJob;
+import azkaban.utils.Props;
 import java.io.File;
 import java.util.List;
 import java.util.StringTokenizer;
-
 import org.apache.log4j.Logger;
 
-import azkaban.jobExecutor.JavaProcessJob;
-import azkaban.utils.Props;
-
 public class JavaJob extends JavaProcessJob {
 
   public static final String RUN_METHOD_PARAM = "method.run";
@@ -35,32 +33,49 @@ public class JavaJob extends JavaProcessJob {
   public static final String DEFAULT_CANCEL_METHOD = "cancel";
   public static final String DEFAULT_RUN_METHOD = "run";
   public static final String DEFAULT_PROGRESS_METHOD = "getProgress";
-
+  private final Object _javaObject = null;
   private String _runMethod;
   private String _cancelMethod;
   private String _progressMethod;
-
-  private Object _javaObject = null;
   private String props;
 
-  public JavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+  public JavaJob(final String jobid, final Props sysProps, final Props jobProps, final Logger log) {
     super(jobid, sysProps, new Props(sysProps, jobProps), log);
   }
 
+  private static String getSourcePathFromClass(final Class<?> containedClass) {
+    File file =
+        new File(containedClass.getProtectionDomain().getCodeSource()
+            .getLocation().getPath());
+
+    if (!file.isDirectory() && file.getName().endsWith(".class")) {
+      final String name = containedClass.getName();
+      final StringTokenizer tokenizer = new StringTokenizer(name, ".");
+      while (tokenizer.hasMoreTokens()) {
+        tokenizer.nextElement();
+        file = file.getParentFile();
+      }
+      return file.getPath();
+    } else {
+      return containedClass.getProtectionDomain().getCodeSource().getLocation()
+          .getPath();
+    }
+  }
+
   @Override
   protected List<String> getClassPaths() {
-    List<String> classPath = super.getClassPaths();
+    final List<String> classPath = super.getClassPaths();
 
     classPath.add(getSourcePathFromClass(JavaJobRunnerMain.class));
     classPath.add(getSourcePathFromClass(Props.class));
 
-    String loggerPath = getSourcePathFromClass(org.apache.log4j.Logger.class);
+    final String loggerPath = getSourcePathFromClass(org.apache.log4j.Logger.class);
     if (!classPath.contains(loggerPath)) {
       classPath.add(loggerPath);
     }
 
     // Add hadoop home to classpath
-    String hadoopHome = System.getenv("HADOOP_HOME");
+    final String hadoopHome = System.getenv("HADOOP_HOME");
     if (hadoopHome == null) {
       info("HADOOP_HOME not set, using default hadoop config.");
     } else {
@@ -70,25 +85,6 @@ public class JavaJob extends JavaProcessJob {
     return classPath;
   }
 
-  private static String getSourcePathFromClass(Class<?> containedClass) {
-    File file =
-        new File(containedClass.getProtectionDomain().getCodeSource()
-            .getLocation().getPath());
-
-    if (!file.isDirectory() && file.getName().endsWith(".class")) {
-      String name = containedClass.getName();
-      StringTokenizer tokenizer = new StringTokenizer(name, ".");
-      while (tokenizer.hasMoreTokens()) {
-        tokenizer.nextElement();
-        file = file.getParentFile();
-      }
-      return file.getPath();
-    } else {
-      return containedClass.getProtectionDomain().getCodeSource().getLocation()
-          .getPath();
-    }
-  }
-
   @Override
   protected String getJavaClass() {
     return JavaJobRunnerMain.class.getName();
@@ -96,9 +92,9 @@ public class JavaJob extends JavaProcessJob {
 
   @Override
   public String toString() {
-    return "JavaJob{" + "_runMethod='" + _runMethod + '\''
-        + ", _cancelMethod='" + _cancelMethod + '\'' + ", _progressMethod='"
-        + _progressMethod + '\'' + ", _javaObject=" + _javaObject + ", props="
-        + props + '}';
+    return "JavaJob{" + "_runMethod='" + this._runMethod + '\''
+        + ", _cancelMethod='" + this._cancelMethod + '\'' + ", _progressMethod='"
+        + this._progressMethod + '\'' + ", _javaObject=" + this._javaObject + ", props="
+        + this.props + '}';
   }
 }
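getSourcePathFromClass() above derives a classpath entry (a jar or a classes directory) from a class's ProtectionDomain code source. A standalone sketch of the same lookup is shown below; it assumes the class was loaded by an ordinary file-based class loader, and the class name is illustrative only.

// Sketch of the CodeSource-based lookup: given a class, return the jar file
// or classes directory it was loaded from.
import java.io.File;

public final class ClasspathProbe {

  public static String sourcePathOf(final Class<?> clazz) {
    // The code source location is normally the jar or the classes root.
    final File location = new File(
        clazz.getProtectionDomain().getCodeSource().getLocation().getPath());

    if (location.isDirectory() || !location.getName().endsWith(".class")) {
      return location.getPath();
    }
    // Rare case: the location points at the .class file itself, so walk up
    // one directory per name segment to recover the classpath root.
    File root = location;
    for (final String ignored : clazz.getName().split("\\.")) {
      root = root.getParentFile();
    }
    return root.getPath();
  }

  public static void main(final String[] args) {
    // Prints the build output directory (or jar) containing this class.
    System.out.println(sourcePathOf(ClasspathProbe.class));
  }
}
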
diff --git a/azkaban-common/src/test/java/azkaban/executor/JavaJobRunnerMain.java b/azkaban-common/src/test/java/azkaban/executor/JavaJobRunnerMain.java
index 7ef359f..e0ef0d1 100644
--- a/azkaban-common/src/test/java/azkaban/executor/JavaJobRunnerMain.java
+++ b/azkaban-common/src/test/java/azkaban/executor/JavaJobRunnerMain.java
@@ -16,6 +16,8 @@
 
 package azkaban.executor;
 
+import azkaban.jobExecutor.ProcessJob;
+import azkaban.utils.Props;
 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
 import java.io.FileOutputStream;
@@ -29,15 +31,11 @@ import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Properties;
-
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
 import org.apache.log4j.PatternLayout;
 
-import azkaban.jobExecutor.ProcessJob;
-import azkaban.utils.Props;
-
 public class JavaJobRunnerMain {
 
   public static final String JOB_CLASS = "job.class";
@@ -51,8 +49,8 @@ public class JavaJobRunnerMain {
 
   public static final String CANCEL_METHOD_PARAM = "method.cancel";
   public static final String RUN_METHOD_PARAM = "method.run";
-  public static final String[] PROPS_CLASSES = new String[] {
-      "azkaban.utils.Props", "azkaban.common.utils.Props" };
+  public static final String[] PROPS_CLASSES = new String[]{
+      "azkaban.utils.Props", "azkaban.common.utils.Props"};
 
   private static final Layout DEFAULT_LAYOUT = new PatternLayout("%p %m\n");
 
@@ -63,11 +61,6 @@ public class JavaJobRunnerMain {
   public Object _javaObject;
   private boolean _isFinished = false;
 
-  public static void main(String[] args) throws Exception {
-    @SuppressWarnings("unused")
-    JavaJobRunnerMain wrapper = new JavaJobRunnerMain();
-  }
-
   public JavaJobRunnerMain() throws Exception {
     Runtime.getRuntime().addShutdownHook(new Thread() {
       @Override
@@ -77,171 +70,90 @@ public class JavaJobRunnerMain {
     });
 
     try {
-      _jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
-      String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
+      this._jobName = System.getenv(ProcessJob.JOB_NAME_ENV);
+      final String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
 
-      _logger = Logger.getRootLogger();
-      _logger.removeAllAppenders();
-      ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
+      this._logger = Logger.getRootLogger();
+      this._logger.removeAllAppenders();
+      final ConsoleAppender appender = new ConsoleAppender(DEFAULT_LAYOUT);
       appender.activateOptions();
-      _logger.addAppender(appender);
+      this._logger.addAppender(appender);
 
-      Properties prop = new Properties();
+      final Properties prop = new Properties();
       prop.load(new BufferedReader(new FileReader(propsFile)));
 
-      _logger.info("Running job " + _jobName);
-      String className = prop.getProperty(JOB_CLASS);
+      this._logger.info("Running job " + this._jobName);
+      final String className = prop.getProperty(JOB_CLASS);
       if (className == null) {
         throw new Exception("Class name is not set.");
       }
-      _logger.info("Class name " + className);
+      this._logger.info("Class name " + className);
 
       // Create the object using proxy
 
-      _javaObject = getObject(_jobName, className, prop, _logger);
+      this._javaObject = getObject(this._jobName, className, prop, this._logger);
 
-      if (_javaObject == null) {
-        _logger.info("Could not create java object to run job: " + className);
+      if (this._javaObject == null) {
+        this._logger.info("Could not create java object to run job: " + className);
         throw new Exception("Could not create running object");
       }
 
-      _cancelMethod =
+      this._cancelMethod =
           prop.getProperty(CANCEL_METHOD_PARAM, DEFAULT_CANCEL_METHOD);
 
       final String runMethod =
           prop.getProperty(RUN_METHOD_PARAM, DEFAULT_RUN_METHOD);
-      _logger.info("Invoking method " + runMethod);
+      this._logger.info("Invoking method " + runMethod);
 
-      _logger.info("Proxy check failed, not proxying run.");
-      runMethod(_javaObject, runMethod);
+      this._logger.info("Proxy check failed, not proxying run.");
+      runMethod(this._javaObject, runMethod);
 
-      _isFinished = true;
+      this._isFinished = true;
 
       // Get the generated properties and store them to disk, to be read
       // by ProcessJob.
       try {
         final Method generatedPropertiesMethod =
-            _javaObject.getClass().getMethod(GET_GENERATED_PROPERTIES_METHOD,
-                new Class<?>[] {});
-        Object outputGendProps =
-            generatedPropertiesMethod.invoke(_javaObject, new Object[] {});
+            this._javaObject.getClass().getMethod(GET_GENERATED_PROPERTIES_METHOD,
+                new Class<?>[]{});
+        final Object outputGendProps =
+            generatedPropertiesMethod.invoke(this._javaObject, new Object[]{});
         if (outputGendProps != null) {
           final Method toPropertiesMethod =
               outputGendProps.getClass().getMethod("toProperties",
-                  new Class<?>[] {});
-          Properties properties =
+                  new Class<?>[]{});
+          final Properties properties =
               (Properties) toPropertiesMethod.invoke(outputGendProps,
-                  new Object[] {});
+                  new Object[]{});
 
-          Props outputProps = new Props(null, properties);
+          final Props outputProps = new Props(null, properties);
           outputGeneratedProperties(outputProps);
         } else {
           outputGeneratedProperties(new Props());
         }
 
-      } catch (NoSuchMethodException e) {
-        _logger
+      } catch (final NoSuchMethodException e) {
+        this._logger
             .info(String
                 .format(
                     "Apparently there isn't a method[%s] on object[%s], using empty Props object instead.",
-                    GET_GENERATED_PROPERTIES_METHOD, _javaObject));
+                    GET_GENERATED_PROPERTIES_METHOD, this._javaObject));
         outputGeneratedProperties(new Props());
       }
-    } catch (Exception e) {
-      _isFinished = true;
+    } catch (final Exception e) {
+      this._isFinished = true;
       throw e;
     }
   }
 
-  private void runMethod(Object obj, String runMethod)
-      throws IllegalAccessException, InvocationTargetException,
-      NoSuchMethodException {
-    obj.getClass().getMethod(runMethod, new Class<?>[] {}).invoke(obj);
-  }
-
-  private void outputGeneratedProperties(Props outputProperties) {
-
-    if (outputProperties == null) {
-      _logger.info("  no gend props");
-      return;
-    }
-    for (String key : outputProperties.getKeySet()) {
-      _logger
-          .info("  gend prop " + key + " value:" + outputProperties.get(key));
-    }
-
-    String outputFileStr = System.getenv(ProcessJob.JOB_OUTPUT_PROP_FILE);
-    if (outputFileStr == null) {
-      return;
-    }
-
-    _logger.info("Outputting generated properties to " + outputFileStr);
-
-    Map<String, String> properties = new LinkedHashMap<String, String>();
-    for (String key : outputProperties.getKeySet()) {
-      properties.put(key, outputProperties.get(key));
-    }
-
-    OutputStream writer = null;
-    try {
-      writer = new BufferedOutputStream(new FileOutputStream(outputFileStr));
-
-      // Manually serialize into JSON instead of adding org.json to
-      // external classpath. Reduces one dependency for something that's
-      // essentially easy.
-      writer.write("{\n".getBytes());
-      for (Map.Entry<String, String> entry : properties.entrySet()) {
-        writer.write(String.format("  \"%s\":\"%s\",\n",
-            entry.getKey().replace("\"", "\\\\\""),
-            entry.getValue().replace("\"", "\\\\\"")).getBytes());
-      }
-      writer.write("}".getBytes());
-    } catch (Exception e) {
-      throw new RuntimeException("Unable to store output properties to: "
-          + outputFileStr);
-    } finally {
-      try {
-        if (writer != null) {
-          writer.close();
-        }
-      } catch (IOException e) {
-      }
-    }
-  }
-
-  public void cancelJob() {
-    if (_isFinished) {
-      return;
-    }
-    _logger.info("Attempting to call cancel on this job");
-    if (_javaObject != null) {
-      Method method = null;
-
-      try {
-        method = _javaObject.getClass().getMethod(_cancelMethod);
-      } catch (SecurityException e) {
-      } catch (NoSuchMethodException e) {
-      }
-
-      if (method != null)
-        try {
-          method.invoke(_javaObject);
-        } catch (Exception e) {
-          if (_logger != null) {
-            _logger.error("Cancel method failed! ", e);
-          }
-        }
-      else {
-        throw new RuntimeException("Job " + _jobName
-            + " does not have cancel method " + _cancelMethod);
-      }
-    }
+  public static void main(final String[] args) throws Exception {
+    final JavaJobRunnerMain wrapper = new JavaJobRunnerMain();
   }
 
-  private static Object getObject(String jobName, String className,
-      Properties properties, Logger logger) throws Exception {
+  private static Object getObject(final String jobName, final String className,
+      final Properties properties, final Logger logger) throws Exception {
 
-    Class<?> runningClass =
+    final Class<?> runningClass =
         JavaJobRunnerMain.class.getClassLoader().loadClass(className);
 
     if (runningClass == null) {
@@ -250,11 +162,11 @@ public class JavaJobRunnerMain {
     }
 
     Class<?> propsClass = null;
-    for (String propClassName : PROPS_CLASSES) {
+    for (final String propClassName : PROPS_CLASSES) {
       try {
         propsClass =
             JavaJobRunnerMain.class.getClassLoader().loadClass(propClassName);
-      } catch (ClassNotFoundException e) {
+      } catch (final ClassNotFoundException e) {
       }
 
       if (propsClass != null
@@ -269,55 +181,140 @@ public class JavaJobRunnerMain {
     if (propsClass != null
         && getConstructor(runningClass, String.class, propsClass) != null) {
       // Create props class
-      Constructor<?> propsCon =
+      final Constructor<?> propsCon =
           getConstructor(propsClass, propsClass, Properties[].class);
-      Object props =
-          propsCon.newInstance(null, new Properties[] { properties });
+      final Object props =
+          propsCon.newInstance(null, new Properties[]{properties});
 
-      Constructor<?> con =
+      final Constructor<?> con =
           getConstructor(runningClass, String.class, propsClass);
       logger.info("Constructor found " + con.toGenericString());
       obj = con.newInstance(jobName, props);
     } else if (getConstructor(runningClass, String.class, Properties.class) != null) {
 
-      Constructor<?> con =
+      final Constructor<?> con =
           getConstructor(runningClass, String.class, Properties.class);
       logger.info("Constructor found " + con.toGenericString());
       obj = con.newInstance(jobName, properties);
     } else if (getConstructor(runningClass, String.class, Map.class) != null) {
-      Constructor<?> con =
+      final Constructor<?> con =
           getConstructor(runningClass, String.class, Map.class);
       logger.info("Constructor found " + con.toGenericString());
 
-      HashMap<Object, Object> map = new HashMap<Object, Object>();
-      for (Map.Entry<Object, Object> entry : properties.entrySet()) {
+      final HashMap<Object, Object> map = new HashMap<>();
+      for (final Map.Entry<Object, Object> entry : properties.entrySet()) {
         map.put(entry.getKey(), entry.getValue());
       }
       obj = con.newInstance(jobName, map);
     } else if (getConstructor(runningClass, String.class) != null) {
-      Constructor<?> con = getConstructor(runningClass, String.class);
+      final Constructor<?> con = getConstructor(runningClass, String.class);
       logger.info("Constructor found " + con.toGenericString());
       obj = con.newInstance(jobName);
     } else if (getConstructor(runningClass) != null) {
-      Constructor<?> con = getConstructor(runningClass);
+      final Constructor<?> con = getConstructor(runningClass);
       logger.info("Constructor found " + con.toGenericString());
       obj = con.newInstance();
     } else {
       logger.error("Constructor not found. Listing available Constructors.");
-      for (Constructor<?> c : runningClass.getConstructors()) {
+      for (final Constructor<?> c : runningClass.getConstructors()) {
         logger.info(c.toGenericString());
       }
     }
     return obj;
   }
 
-  private static Constructor<?> getConstructor(Class<?> c, Class<?>... args) {
+  private static Constructor<?> getConstructor(final Class<?> c, final Class<?>... args) {
     try {
-      Constructor<?> cons = c.getConstructor(args);
+      final Constructor<?> cons = c.getConstructor(args);
       return cons;
-    } catch (NoSuchMethodException e) {
+    } catch (final NoSuchMethodException e) {
       return null;
     }
   }
 
+  private void runMethod(final Object obj, final String runMethod)
+      throws IllegalAccessException, InvocationTargetException,
+      NoSuchMethodException {
+    obj.getClass().getMethod(runMethod, new Class<?>[]{}).invoke(obj);
+  }
+
+  private void outputGeneratedProperties(final Props outputProperties) {
+
+    if (outputProperties == null) {
+      this._logger.info("  no gend props");
+      return;
+    }
+    for (final String key : outputProperties.getKeySet()) {
+      this._logger
+          .info("  gend prop " + key + " value:" + outputProperties.get(key));
+    }
+
+    final String outputFileStr = System.getenv(ProcessJob.JOB_OUTPUT_PROP_FILE);
+    if (outputFileStr == null) {
+      return;
+    }
+
+    this._logger.info("Outputting generated properties to " + outputFileStr);
+
+    final Map<String, String> properties = new LinkedHashMap<>();
+    for (final String key : outputProperties.getKeySet()) {
+      properties.put(key, outputProperties.get(key));
+    }
+
+    OutputStream writer = null;
+    try {
+      writer = new BufferedOutputStream(new FileOutputStream(outputFileStr));
+
+      // Manually serialize into JSON instead of adding org.json to
+      // external classpath. Reduces one dependency for something that's
+      // essentially easy.
+      writer.write("{\n".getBytes());
+      for (final Map.Entry<String, String> entry : properties.entrySet()) {
+        writer.write(String.format("  \"%s\":\"%s\",\n",
+            entry.getKey().replace("\"", "\\\\\""),
+            entry.getValue().replace("\"", "\\\\\"")).getBytes());
+      }
+      writer.write("}".getBytes());
+    } catch (final Exception e) {
+      throw new RuntimeException("Unable to store output properties to: "
+          + outputFileStr);
+    } finally {
+      try {
+        if (writer != null) {
+          writer.close();
+        }
+      } catch (final IOException e) {
+      }
+    }
+  }
+
+  public void cancelJob() {
+    if (this._isFinished) {
+      return;
+    }
+    this._logger.info("Attempting to call cancel on this job");
+    if (this._javaObject != null) {
+      Method method = null;
+
+      try {
+        method = this._javaObject.getClass().getMethod(this._cancelMethod);
+      } catch (final SecurityException e) {
+      } catch (final NoSuchMethodException e) {
+      }
+
+      if (method != null) {
+        try {
+          method.invoke(this._javaObject);
+        } catch (final Exception e) {
+          if (this._logger != null) {
+            this._logger.error("Cancel method failed! ", e);
+          }
+        }
+      } else {
+        throw new RuntimeException("Job " + this._jobName
+            + " does not have cancel method " + this._cancelMethod);
+      }
+    }
+  }
+
 }
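JavaJobRunnerMain builds the job object reflectively: getObject() probes the loaded class for one of several known constructor shapes and uses the first match, and runMethod() later invokes the configured run method by name. A hedged sketch of that constructor-probing pattern follows; the factory and sample job class are illustrative, not Azkaban classes.

// Probe a class for one of several supported constructor signatures and
// instantiate it with the first match.
import java.lang.reflect.Constructor;
import java.util.Properties;

public final class ReflectiveFactory {

  // Returns the constructor if present, or null (mirrors getConstructor above).
  private static Constructor<?> find(final Class<?> c, final Class<?>... args) {
    try {
      return c.getConstructor(args);
    } catch (final NoSuchMethodException e) {
      return null;
    }
  }

  public static Object newJob(final String name, final Class<?> jobClass,
      final Properties props) throws Exception {
    if (find(jobClass, String.class, Properties.class) != null) {
      return find(jobClass, String.class, Properties.class).newInstance(name, props);
    }
    if (find(jobClass, String.class) != null) {
      return find(jobClass, String.class).newInstance(name);
    }
    if (find(jobClass) != null) {
      return find(jobClass).newInstance();
    }
    throw new IllegalArgumentException(
        "No supported constructor on " + jobClass.getName());
  }

  // Example job type with a (String, Properties) constructor.
  public static final class EchoJob {
    public EchoJob(final String name, final Properties props) {
      System.out.println("Constructed " + name + " with " + props.size() + " props");
    }
  }

  public static void main(final String[] args) throws Exception {
    newJob("demo", EchoJob.class, new Properties());
  }
}
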
diff --git a/azkaban-common/src/test/java/azkaban/executor/mail/DefaultMailCreatorTest.java b/azkaban-common/src/test/java/azkaban/executor/mail/DefaultMailCreatorTest.java
index 462216d..7a940b3 100644
--- a/azkaban-common/src/test/java/azkaban/executor/mail/DefaultMailCreatorTest.java
+++ b/azkaban-common/src/test/java/azkaban/executor/mail/DefaultMailCreatorTest.java
@@ -1,5 +1,9 @@
 package azkaban.executor.mail;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.Status;
@@ -9,19 +13,14 @@ import azkaban.project.Project;
 import azkaban.utils.EmailMessage;
 import com.google.common.base.Charsets;
 import com.google.common.collect.ImmutableList;
+import java.io.InputStream;
+import java.util.TimeZone;
 import org.apache.commons.io.IOUtils;
 import org.joda.time.DateTimeUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.InputStream;
-import java.util.TimeZone;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-
 public class DefaultMailCreatorTest {
 
   // 2016/07/17 11:54:11 EEST
@@ -46,82 +45,89 @@ public class DefaultMailCreatorTest {
 
   @Before
   public void setUp() throws Exception {
-    defaultTz = TimeZone.getDefault();
-    assertNotNull(defaultTz);
+    this.defaultTz = TimeZone.getDefault();
+    assertNotNull(this.defaultTz);
     // EEST
     TimeZone.setDefault(TimeZone.getTimeZone("Europe/Helsinki"));
     DateTimeUtils.setCurrentMillisFixed(FIXED_CURRENT_TIME_MILLIS);
 
-    mailCreator = new DefaultMailCreator();
+    this.mailCreator = new DefaultMailCreator();
 
-    flow = new Flow("mail-creator-test");
-    project = new Project(1, "test-project");
-    options = new ExecutionOptions();
-    message = new EmailMessage();
+    this.flow = new Flow("mail-creator-test");
+    this.project = new Project(1, "test-project");
+    this.options = new ExecutionOptions();
+    this.message = new EmailMessage();
 
-    azkabanName = "unit-tests";
-    scheme = "http";
-    clientHostname = "localhost";
-    clientPortNumber = "8081";
+    this.azkabanName = "unit-tests";
+    this.scheme = "http";
+    this.clientHostname = "localhost";
+    this.clientPortNumber = "8081";
 
-    Node failedNode = new Node("test-job");
+    final Node failedNode = new Node("test-job");
     failedNode.setType("noop");
-    flow.addNode(failedNode);
+    this.flow.addNode(failedNode);
 
-    executableFlow = new ExecutableFlow(project, flow);
-    executableFlow.setExecutionOptions(options);
-    executableFlow.setStartTime(START_TIME_MILLIS);
+    this.executableFlow = new ExecutableFlow(this.project, this.flow);
+    this.executableFlow.setExecutionOptions(this.options);
+    this.executableFlow.setStartTime(START_TIME_MILLIS);
 
-    options.setFailureEmails(ImmutableList.of("test@example.com"));
-    options.setSuccessEmails(ImmutableList.of("test@example.com"));
+    this.options.setFailureEmails(ImmutableList.of("test@example.com"));
+    this.options.setSuccessEmails(ImmutableList.of("test@example.com"));
   }
 
   @After
   public void tearDown() throws Exception {
-    if (defaultTz != null) {
-      TimeZone.setDefault(defaultTz);
+    if (this.defaultTz != null) {
+      TimeZone.setDefault(this.defaultTz);
     }
     DateTimeUtils.setCurrentMillisSystem();
   }
 
-  private void setJobStatus(Status status) {
-    executableFlow.getExecutableNodes().get(0).setStatus(status);
+  private void setJobStatus(final Status status) {
+    this.executableFlow.getExecutableNodes().get(0).setStatus(status);
   }
 
   @Test
   public void createErrorEmail() throws Exception {
     setJobStatus(Status.FAILED);
-    executableFlow.setEndTime(END_TIME_MILLIS);
-    executableFlow.setStatus(Status.FAILED);
-    assertTrue(mailCreator.createErrorEmail(
-        executableFlow, message, azkabanName, scheme, clientHostname, clientPortNumber));
-    assertEquals("Flow 'mail-creator-test' has failed on unit-tests", message.getSubject());
-    assertEquals(read("errorEmail.html").replaceAll("\\s+", ""), message.getBody().replaceAll("\\s+", ""));
+    this.executableFlow.setEndTime(END_TIME_MILLIS);
+    this.executableFlow.setStatus(Status.FAILED);
+    assertTrue(this.mailCreator.createErrorEmail(
+        this.executableFlow, this.message, this.azkabanName, this.scheme, this.clientHostname,
+        this.clientPortNumber));
+    assertEquals("Flow 'mail-creator-test' has failed on unit-tests", this.message.getSubject());
+    assertEquals(read("errorEmail.html").replaceAll("\\s+", ""),
+        this.message.getBody().replaceAll("\\s+", ""));
   }
 
   @Test
   public void createFirstErrorMessage() throws Exception {
     setJobStatus(Status.FAILED);
-    executableFlow.setStatus(Status.FAILED_FINISHING);
-    assertTrue(mailCreator.createFirstErrorMessage(
-        executableFlow, message, azkabanName, scheme, clientHostname, clientPortNumber));
-    assertEquals("Flow 'mail-creator-test' has encountered a failure on unit-tests", message.getSubject());
-    assertEquals(read("firstErrorMessage.html").replaceAll("\\s+", ""), message.getBody().replaceAll("\\s+", ""));
+    this.executableFlow.setStatus(Status.FAILED_FINISHING);
+    assertTrue(this.mailCreator.createFirstErrorMessage(
+        this.executableFlow, this.message, this.azkabanName, this.scheme, this.clientHostname,
+        this.clientPortNumber));
+    assertEquals("Flow 'mail-creator-test' has encountered a failure on unit-tests",
+        this.message.getSubject());
+    assertEquals(read("firstErrorMessage.html").replaceAll("\\s+", ""),
+        this.message.getBody().replaceAll("\\s+", ""));
   }
 
   @Test
   public void createSuccessEmail() throws Exception {
     setJobStatus(Status.SUCCEEDED);
-    executableFlow.setEndTime(END_TIME_MILLIS);
-    executableFlow.setStatus(Status.SUCCEEDED);
-    assertTrue(mailCreator.createSuccessEmail(
-        executableFlow, message, azkabanName, scheme, clientHostname, clientPortNumber));
-    assertEquals("Flow 'mail-creator-test' has succeeded on unit-tests", message.getSubject());
-    assertEquals(read("successEmail.html").replaceAll("\\s+", ""), message.getBody().replaceAll("\\s+", ""));
+    this.executableFlow.setEndTime(END_TIME_MILLIS);
+    this.executableFlow.setStatus(Status.SUCCEEDED);
+    assertTrue(this.mailCreator.createSuccessEmail(
+        this.executableFlow, this.message, this.azkabanName, this.scheme, this.clientHostname,
+        this.clientPortNumber));
+    assertEquals("Flow 'mail-creator-test' has succeeded on unit-tests", this.message.getSubject());
+    assertEquals(read("successEmail.html").replaceAll("\\s+", ""),
+        this.message.getBody().replaceAll("\\s+", ""));
   }
 
-  private String read(String file) throws Exception {
-    InputStream is = DefaultMailCreatorTest.class.getResourceAsStream(file);
+  private String read(final String file) throws Exception {
+    final InputStream is = DefaultMailCreatorTest.class.getResourceAsStream(file);
     return IOUtils.toString(is, Charsets.UTF_8).trim();
   }
 
diff --git a/azkaban-common/src/test/java/azkaban/executor/QueuedExecutionsTest.java b/azkaban-common/src/test/java/azkaban/executor/QueuedExecutionsTest.java
index b938b56..ce5b34c 100644
--- a/azkaban-common/src/test/java/azkaban/executor/QueuedExecutionsTest.java
+++ b/azkaban-common/src/test/java/azkaban/executor/QueuedExecutionsTest.java
@@ -1,23 +1,21 @@
 package azkaban.executor;
 
+import azkaban.flow.Flow;
+import azkaban.project.Project;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Pair;
+import azkaban.utils.TestUtils;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-
 import org.junit.Assert;
 import org.junit.Test;
 
-import azkaban.flow.Flow;
-import azkaban.project.Project;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Pair;
-import azkaban.utils.TestUtils;
-
 public class QueuedExecutionsTest {
 
-  private File getFlowDir(String flow) {
+  private File getFlowDir(final String flow) {
     return TestUtils.getFlowDir("exectest1", flow);
   }
 
@@ -26,27 +24,26 @@ public class QueuedExecutionsTest {
    * serialized description
    */
   private Pair<ExecutionReference, ExecutableFlow> createExecutablePair(
-    String flowName, int execId) throws IOException {
-    File jsonFlowFile = getFlowDir(flowName);
-    @SuppressWarnings("unchecked")
-    HashMap<String, Object> flowObj =
-      (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
-
-    Flow flow = Flow.flowFromObject(flowObj);
-    Project project = new Project(1, "flow");
-    HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
+      final String flowName, final int execId) throws IOException {
+    final File jsonFlowFile = getFlowDir(flowName);
+    final HashMap<String, Object> flowObj =
+        (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+
+    final Flow flow = Flow.flowFromObject(flowObj);
+    final Project project = new Project(1, "flow");
+    final HashMap<String, Flow> flowMap = new HashMap<>();
     flowMap.put(flow.getId(), flow);
     project.setFlows(flowMap);
-    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+    final ExecutableFlow execFlow = new ExecutableFlow(project, flow);
     execFlow.setExecutionId(execId);
-    ExecutionReference ref = new ExecutionReference(execId);
-    return new Pair<ExecutionReference, ExecutableFlow>(ref, execFlow);
+    final ExecutionReference ref = new ExecutionReference(execId);
+    return new Pair<>(ref, execFlow);
   }
 
   public List<Pair<ExecutionReference, ExecutableFlow>> getDummyData()
-    throws IOException {
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList =
-      new ArrayList<Pair<ExecutionReference, ExecutableFlow>>();
+      throws IOException {
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList =
+        new ArrayList<>();
     dataList.add(createExecutablePair("exec1", 1));
     dataList.add(createExecutablePair("exec2", 2));
     return dataList;
@@ -55,10 +52,10 @@ public class QueuedExecutionsTest {
   /* Test enqueue method happy case */
   @Test
   public void testEnqueueHappyCase() throws IOException,
-    ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
-    for (Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
+      ExecutorManagerException {
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
       queue.enqueue(pair.getSecond(), pair.getFirst());
     }
 
@@ -69,10 +66,10 @@ public class QueuedExecutionsTest {
   /* Test enqueue duplicate execution ids */
   @Test(expected = ExecutorManagerException.class)
   public void testEnqueueDuplicateExecution() throws IOException,
-    ExecutorManagerException {
-    Pair<ExecutionReference, ExecutableFlow> pair1 =
-      createExecutablePair("exec1", 1);
-    QueuedExecutions queue = new QueuedExecutions(5);
+      ExecutorManagerException {
+    final Pair<ExecutionReference, ExecutableFlow> pair1 =
+        createExecutablePair("exec1", 1);
+    final QueuedExecutions queue = new QueuedExecutions(5);
     queue.enqueue(pair1.getSecond(), pair1.getFirst());
     queue.enqueue(pair1.getSecond(), pair1.getFirst());
   }
@@ -80,10 +77,10 @@ public class QueuedExecutionsTest {
   /* Test enqueue more than capacity */
   @Test(expected = ExecutorManagerException.class)
   public void testEnqueueOverflow() throws IOException,
-    ExecutorManagerException {
-    Pair<ExecutionReference, ExecutableFlow> pair1 =
-      createExecutablePair("exec1", 1);
-    QueuedExecutions queue = new QueuedExecutions(1);
+      ExecutorManagerException {
+    final Pair<ExecutionReference, ExecutableFlow> pair1 =
+        createExecutablePair("exec1", 1);
+    final QueuedExecutions queue = new QueuedExecutions(1);
     queue.enqueue(pair1.getSecond(), pair1.getFirst());
     queue.enqueue(pair1.getSecond(), pair1.getFirst());
   }
@@ -91,8 +88,8 @@ public class QueuedExecutionsTest {
   /* Test EnqueueAll method */
   @Test
   public void testEnqueueAll() throws IOException, ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
     Assert.assertTrue(queue.getAllEntries().containsAll(dataList));
     Assert.assertTrue(dataList.containsAll(queue.getAllEntries()));
@@ -101,8 +98,8 @@ public class QueuedExecutionsTest {
   /* Test size method */
   @Test
   public void testSize() throws IOException, ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
     Assert.assertEquals(queue.size(), 2);
   }
@@ -110,8 +107,8 @@ public class QueuedExecutionsTest {
   /* Test dequeue method */
   @Test
   public void testDequeue() throws IOException, ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
     queue.dequeue(dataList.get(0).getFirst().getExecId());
     Assert.assertEquals(queue.size(), 1);
@@ -121,8 +118,8 @@ public class QueuedExecutionsTest {
   /* Test clear method */
   @Test
   public void testClear() throws IOException, ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
     Assert.assertEquals(queue.size(), 2);
     queue.clear();
@@ -132,8 +129,8 @@ public class QueuedExecutionsTest {
   /* Test isEmpty method */
   @Test
   public void testIsEmpty() throws IOException, ExecutorManagerException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     Assert.assertTrue(queue.isEmpty());
     queue.enqueueAll(dataList);
     Assert.assertEquals(queue.size(), 2);
@@ -144,9 +141,9 @@ public class QueuedExecutionsTest {
   /* Test fetchHead method */
   @Test
   public void testFetchHead() throws IOException, ExecutorManagerException,
-    InterruptedException {
-    QueuedExecutions queue = new QueuedExecutions(5);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+      InterruptedException {
+    final QueuedExecutions queue = new QueuedExecutions(5);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     Assert.assertTrue(queue.isEmpty());
     queue.enqueueAll(dataList);
     Assert.assertEquals(queue.fetchHead(), dataList.get(0));
@@ -156,9 +153,9 @@ public class QueuedExecutionsTest {
   /* Test isFull method */
   @Test
   public void testIsFull() throws IOException, ExecutorManagerException,
-    InterruptedException {
-    QueuedExecutions queue = new QueuedExecutions(2);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+      InterruptedException {
+    final QueuedExecutions queue = new QueuedExecutions(2);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
     Assert.assertTrue(queue.isFull());
   }
@@ -166,11 +163,11 @@ public class QueuedExecutionsTest {
   /* Test hasExecution method */
   @Test
   public void testHasExecution() throws IOException, ExecutorManagerException,
-    InterruptedException {
-    QueuedExecutions queue = new QueuedExecutions(2);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+      InterruptedException {
+    final QueuedExecutions queue = new QueuedExecutions(2);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
-    for (Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
       Assert.assertTrue(queue.hasExecution(pair.getFirst().getExecId()));
     }
     Assert.assertFalse(queue.hasExecution(5));
@@ -181,26 +178,26 @@ public class QueuedExecutionsTest {
   /* Test getFlow method */
   @Test
   public void testGetFlow() throws IOException, ExecutorManagerException,
-    InterruptedException {
-    QueuedExecutions queue = new QueuedExecutions(2);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+      InterruptedException {
+    final QueuedExecutions queue = new QueuedExecutions(2);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
-    for (Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
       Assert.assertEquals(pair.getSecond(),
-        queue.getFlow(pair.getFirst().getExecId()));
+          queue.getFlow(pair.getFirst().getExecId()));
     }
   }
 
   /* Test getReferences method */
   @Test
   public void testGetReferences() throws IOException, ExecutorManagerException,
-    InterruptedException {
-    QueuedExecutions queue = new QueuedExecutions(2);
-    List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
+      InterruptedException {
+    final QueuedExecutions queue = new QueuedExecutions(2);
+    final List<Pair<ExecutionReference, ExecutableFlow>> dataList = getDummyData();
     queue.enqueueAll(dataList);
-    for (Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
+    for (final Pair<ExecutionReference, ExecutableFlow> pair : dataList) {
       Assert.assertEquals(pair.getFirst(),
-        queue.getReference(pair.getFirst().getExecId()));
+          queue.getReference(pair.getFirst().getExecId()));
     }
   }
 }
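These tests pin down the QueuedExecutions contract: bounded capacity, rejection of duplicate execution ids, and lookup by id. The hypothetical minimal implementation below only makes that tested behaviour concrete; it is not the Azkaban class.

// Bounded, keyed FIFO store mirroring the contract exercised above.
import java.util.LinkedHashMap;
import java.util.Map;

public class BoundedKeyedQueue<V> {

  private final int capacity;
  private final Map<Integer, V> entries = new LinkedHashMap<>(); // preserves FIFO order

  public BoundedKeyedQueue(final int capacity) {
    this.capacity = capacity;
  }

  public synchronized void enqueue(final int id, final V value) {
    if (this.entries.containsKey(id)) {
      throw new IllegalStateException("Duplicate execution id " + id);
    }
    if (this.entries.size() >= this.capacity) {
      throw new IllegalStateException("Queue is full (capacity " + this.capacity + ")");
    }
    this.entries.put(id, value);
  }

  public synchronized V dequeue(final int id) {
    return this.entries.remove(id);
  }

  public synchronized boolean hasExecution(final int id) {
    return this.entries.containsKey(id);
  }

  public synchronized int size() {
    return this.entries.size();
  }

  public synchronized boolean isFull() {
    return this.entries.size() >= this.capacity;
  }

  public static void main(final String[] args) {
    final BoundedKeyedQueue<String> queue = new BoundedKeyedQueue<>(2);
    queue.enqueue(1, "exec1");
    queue.enqueue(2, "exec2");
    System.out.println(queue.isFull());         // true
    System.out.println(queue.hasExecution(1));  // true
    queue.dequeue(1);
    System.out.println(queue.size());           // 1
  }
}
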
diff --git a/azkaban-common/src/test/java/azkaban/executor/SelectorTest.java b/azkaban-common/src/test/java/azkaban/executor/SelectorTest.java
index ac8c65c..713eb2e 100644
--- a/azkaban-common/src/test/java/azkaban/executor/SelectorTest.java
+++ b/azkaban-common/src/test/java/azkaban/executor/SelectorTest.java
@@ -16,13 +16,21 @@
 
 package azkaban.executor;
 
+import azkaban.executor.selector.CandidateComparator;
+import azkaban.executor.selector.CandidateFilter;
+import azkaban.executor.selector.CandidateSelector;
+import azkaban.executor.selector.ExecutorComparator;
+import azkaban.executor.selector.ExecutorFilter;
+import azkaban.executor.selector.ExecutorSelector;
+import azkaban.executor.selector.FactorComparator;
+import azkaban.executor.selector.FactorFilter;
+import azkaban.utils.JSONUtils;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.log4j.BasicConfigurator;
 import org.junit.After;
 import org.junit.Assert;
@@ -30,239 +38,52 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import azkaban.executor.selector.*;
-import azkaban.utils.JSONUtils;
-
 public class SelectorTest {
-  // mock executor object.
-  static class MockExecutorObject implements Comparable <MockExecutorObject>{
-    public String name;
-    public int    port;
-    public double percentOfRemainingMemory;
-    public int    amountOfRemainingMemory;
-    public int    priority;
-    public Date   lastAssigned;
-    public double percentOfRemainingFlowcapacity;
-    public int    remainingTmp;
-
-    public MockExecutorObject(String name,
-        int port,
-        double percentOfRemainingMemory,
-        int amountOfRemainingMemory,
-        int priority,
-        Date lastAssigned,
-        double percentOfRemainingFlowcapacity,
-        int remainingTmp)
-    {
-      this.name = name;
-      this.port = port;
-      this.percentOfRemainingMemory = percentOfRemainingMemory;
-      this.amountOfRemainingMemory =amountOfRemainingMemory;
-      this.priority = priority;
-      this.lastAssigned = lastAssigned;
-      this.percentOfRemainingFlowcapacity = percentOfRemainingFlowcapacity;
-      this.remainingTmp = remainingTmp;
-    }
-
-    @Override
-    public String toString()
-    {
-      return this.name;
-    }
-
-    @Override
-    public int compareTo(MockExecutorObject o) {
-      return null == o ? 1 : this.hashCode() - o.hashCode();
-    }
-  }
-
-  // Mock flow object.
-  static class MockFlowObject{
-    public String name;
-    public int    requiredRemainingMemory;
-    public int    requiredTotalMemory;
-    public int    requiredRemainingTmpSpace;
-    public int    priority;
-
-    public MockFlowObject(String name,
-        int requiredTotalMemory,
-        int requiredRemainingMemory,
-        int requiredRemainingTmpSpace,
-        int priority)
-    {
-      this.name = name;
-      this.requiredTotalMemory = requiredTotalMemory;
-      this.requiredRemainingMemory = requiredRemainingMemory;
-      this.requiredRemainingTmpSpace = requiredRemainingTmpSpace;
-      this.priority = priority;
-    }
-
-    @Override
-    public String toString()
-    {
-      return this.name;
-    }
-  }
-
-  // mock Filter class.
-  static class MockFilter
-  extends CandidateFilter<MockExecutorObject,MockFlowObject>{
-
-    @Override
-    public String getName() {
-      return "Mockfilter";
-    }
-
-    public MockFilter(){
-    }
-
-    // function to register the remainingMemory filter.
-    // for test purpose the registration is put in a separated method, in production the work should be done
-    // in the constructor.
-    public void registerFilterforTotalMemory(){
-      this.registerFactorFilter(FactorFilter.create("requiredTotalMemory", (itemToCheck, sourceObject) -> {
-        // REAL LOGIC COMES HERE -
-        if (null == itemToCheck || null == sourceObject){
-          return false;
-        }
-
-        // Box has infinite memory.:)
-        if (itemToCheck.percentOfRemainingMemory == 0) {
-          return true;
-        }
-
-        // calculate the memory and return.
-        return itemToCheck.amountOfRemainingMemory / itemToCheck.percentOfRemainingMemory * 100 >
-               sourceObject.requiredTotalMemory;
-      }));
-    }
-
-    public void registerFilterforRemainingMemory(){
-      this.registerFactorFilter(FactorFilter.create("requiredRemainingMemory", (itemToCheck, sourceObject) -> {
-        // REAL LOGIC COMES HERE -
-        if (null == itemToCheck || null == sourceObject){
-          return false;
-       }
-       return itemToCheck.amountOfRemainingMemory > sourceObject.requiredRemainingMemory;
-      }));
-    }
-
-    public void registerFilterforPriority(){
-      this.registerFactorFilter(FactorFilter.create("requiredProprity", (itemToCheck, sourceObject) -> {
-        // REAL LOGIC COMES HERE -
-        if (null == itemToCheck || null == sourceObject){
-          return false;
-        }
-
-        // priority value, the bigger the lower.
-        return itemToCheck.priority >= sourceObject.priority;
-      }));
-    }
-
-    public void registerFilterforRemainingTmpSpace(){
-      this.registerFactorFilter(FactorFilter.create("requiredRemainingTmpSpace", (itemToCheck, sourceObject) -> {
-        // REAL LOGIC COMES HERE -
-        if (null == itemToCheck || null == sourceObject){
-          return false;
-        }
-
-       return itemToCheck.remainingTmp > sourceObject.requiredRemainingTmpSpace;
-      }));
-    }
-
-  }
-
-  // mock comparator class.
-  static class MockComparator
-  extends CandidateComparator<MockExecutorObject>{
-
-    @Override
-    public String getName() {
-      return "MockComparator";
-    }
-
-    @Override
-    protected boolean tieBreak(MockExecutorObject object1, MockExecutorObject object2){
-      if (null == object2) return true;
-      if (null == object1) return false;
-      return object1.name.compareTo(object2.name) >= 0;
-    }
-
-    public MockComparator(){
-    }
-
-    public void registerComparerForMemory(int weight){
-      this.registerFactorComparator(FactorComparator.create("Memory", weight, (o1, o2) -> {
-        int result = 0 ;
-
-        // check remaining amount of memory.
-        result = o1.amountOfRemainingMemory - o2.amountOfRemainingMemory;
-        if (result != 0){
-          return result > 0 ? 1 : -1;
-        }
-
-        // check remaining % .
-        result = (int)(o1.percentOfRemainingMemory - o2.percentOfRemainingMemory);
-        return result == 0 ? 0 : result > 0 ? 1 : -1;
-
-      }));
-    }
-
-    public void registerComparerForRemainingSpace(int weight){
-      this.registerFactorComparator(FactorComparator.create("RemainingTmp", weight, (o1, o2) -> {
-        int result = 0 ;
-
-        // check remaining % .
-        result = (int)(o1.remainingTmp - o2.remainingTmp);
-        return result == 0 ? 0 : result > 0 ? 1 : -1;
-
-      }));
-    }
-
-    public void registerComparerForPriority(int weight){
-      this.registerFactorComparator(FactorComparator.create("Priority", weight, (o1, o2) -> {
-        int result = 0 ;
-
-        // check priority, bigger the better.
-        result = (int)(o1.priority - o2.priority);
-        return result == 0 ? 0 : result > 0 ? 1 : -1;
-
-      }));
-    }
-  }
 
   // test samples.
-  protected ArrayList<MockExecutorObject> executorList = new ArrayList<MockExecutorObject>();
+  protected ArrayList<MockExecutorObject> executorList = new ArrayList<>();
 
-  @BeforeClass public static void onlyOnce() {
+  @BeforeClass
+  public static void onlyOnce() {
     BasicConfigurator.configure();
-   }
+  }
 
   @Before
   public void setUp() throws Exception {
-    executorList.clear();
-    executorList.add(new MockExecutorObject("Executor1",8080,50.0,2048,5,new Date(), 20, 6400));
-    executorList.add(new MockExecutorObject("Executor2",8080,50.0,2048,4,new Date(), 20, 6400));
-    executorList.add(new MockExecutorObject("Executor3",8080,40.0,2048,1,new Date(), 20, 6400));
-    executorList.add(new MockExecutorObject("Executor4",8080,50.0,2048,4,new Date(), 20, 6400));
-    executorList.add(new MockExecutorObject("Executor5",8080,50.0,1024,5,new Date(), 90, 6400));
-    executorList.add(new MockExecutorObject("Executor6",8080,50.0,1024,5,new Date(), 90, 3200));
-    executorList.add(new MockExecutorObject("Executor7",8080,50.0,1024,5,new Date(), 90, 3200));
-    executorList.add(new MockExecutorObject("Executor8",8080,50.0,2048,1,new Date(), 90, 3200));
-    executorList.add(new MockExecutorObject("Executor9",8080,50.0,2050,5,new Date(), 90, 4200));
-    executorList.add(new MockExecutorObject("Executor10",8080,00.0,1024,1,new Date(), 90, 3200));
-    executorList.add(new MockExecutorObject("Executor11",8080,20.0,2096,3,new Date(), 90, 2400));
-    executorList.add(new MockExecutorObject("Executor12",8080,90.0,2050,5,new Date(), 60, 2500));
-
+    this.executorList.clear();
+    this.executorList
+        .add(new MockExecutorObject("Executor1", 8080, 50.0, 2048, 5, new Date(), 20, 6400));
+    this.executorList
+        .add(new MockExecutorObject("Executor2", 8080, 50.0, 2048, 4, new Date(), 20, 6400));
+    this.executorList
+        .add(new MockExecutorObject("Executor3", 8080, 40.0, 2048, 1, new Date(), 20, 6400));
+    this.executorList
+        .add(new MockExecutorObject("Executor4", 8080, 50.0, 2048, 4, new Date(), 20, 6400));
+    this.executorList
+        .add(new MockExecutorObject("Executor5", 8080, 50.0, 1024, 5, new Date(), 90, 6400));
+    this.executorList
+        .add(new MockExecutorObject("Executor6", 8080, 50.0, 1024, 5, new Date(), 90, 3200));
+    this.executorList
+        .add(new MockExecutorObject("Executor7", 8080, 50.0, 1024, 5, new Date(), 90, 3200));
+    this.executorList
+        .add(new MockExecutorObject("Executor8", 8080, 50.0, 2048, 1, new Date(), 90, 3200));
+    this.executorList
+        .add(new MockExecutorObject("Executor9", 8080, 50.0, 2050, 5, new Date(), 90, 4200));
+    this.executorList
+        .add(new MockExecutorObject("Executor10", 8080, 00.0, 1024, 1, new Date(), 90, 3200));
+    this.executorList
+        .add(new MockExecutorObject("Executor11", 8080, 20.0, 2096, 3, new Date(), 90, 2400));
+    this.executorList
+        .add(new MockExecutorObject("Executor12", 8080, 90.0, 2050, 5, new Date(), 60, 2500));
 
     // make sure each time the order is different.
     Collections.shuffle(this.executorList);
   }
 
-  private MockExecutorObject  getExecutorByName(String name){
+  private MockExecutorObject getExecutorByName(final String name) {
     MockExecutorObject returnVal = null;
-    for (MockExecutorObject item : this.executorList){
-      if (item.name.equals(name)){
+    for (final MockExecutorObject item : this.executorList) {
+      if (item.name.equals(name)) {
         returnVal = item;
         break;
       }
@@ -277,42 +98,42 @@ public class SelectorTest {
   @Test
   public void testExecutorFilter() throws Exception {
 
-      // mock object, remaining memory 11500, total memory 3095, remainingTmpSpace 4200, priority 2.
-      MockFlowObject  dispatchingObj = new MockFlowObject("flow1",3096, 1500,4200,2);
+    // mock object, remaining memory 11500, total memory 3095, remainingTmpSpace 4200, priority 2.
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 3096, 1500, 4200, 2);
 
-      MockFilter mFilter = new MockFilter();
-      mFilter.registerFilterforRemainingMemory();
+    final MockFilter mFilter = new MockFilter();
+    mFilter.registerFilterforRemainingMemory();
 
-      // expect true.
-      boolean result = mFilter.filterTarget(this.getExecutorByName("Executor1"), dispatchingObj);
-      Assert.assertTrue(result);
+    // expect true.
+    boolean result = mFilter.filterTarget(this.getExecutorByName("Executor1"), dispatchingObj);
+    Assert.assertTrue(result);
 
-      //expect true.
-      result = mFilter.filterTarget(this.getExecutorByName("Executor3"), dispatchingObj);
+    //expect true.
+    result = mFilter.filterTarget(this.getExecutorByName("Executor3"), dispatchingObj);
       /*
       1 [main] INFO azkaban.executor.Selector.CandidateFilter  - start checking 'Executor3' with factor filter for 'Mockfilter'
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredRemainingMemory] filter result : true
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - Final checking result : true
       */
-      Assert.assertTrue(result);
+    Assert.assertTrue(result);
 
-      // add the priority filter.
-      mFilter.registerFilterforPriority();
-      result = mFilter.filterTarget(this.getExecutorByName("Executor3"), dispatchingObj);
-      // expect false, for priority.
+    // add the priority filter.
+    mFilter.registerFilterforPriority();
+    result = mFilter.filterTarget(this.getExecutorByName("Executor3"), dispatchingObj);
+    // expect false, for priority.
       /*
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - start checking 'Executor3' with factor filter for 'Mockfilter'
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredRemainingMemory] filter result : true
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredProprity] filter result : false
       2 [main] INFO azkaban.executor.Selector.CandidateFilter  - Final checking result : false
       */
-      Assert.assertFalse(result);
+    Assert.assertFalse(result);
 
-      // add the remaining space filter.
-      mFilter.registerFilterforRemainingTmpSpace();
+    // add the remaining space filter.
+    mFilter.registerFilterforRemainingTmpSpace();
 
-      // expect pass.
-      result = mFilter.filterTarget(this.getExecutorByName("Executor2"), dispatchingObj);
+    // expect pass.
+    result = mFilter.filterTarget(this.getExecutorByName("Executor2"), dispatchingObj);
       /*
       3 [main] INFO azkaban.executor.Selector.CandidateFilter  - start checking 'Executor2' with factor filter for 'Mockfilter'
       3 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredRemainingMemory] filter result : true
@@ -320,42 +141,42 @@ public class SelectorTest {
       3 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredProprity] filter result : true
       3 [main] INFO azkaban.executor.Selector.CandidateFilter  - Final checking result : true
       */
-      Assert.assertTrue(result);
+    Assert.assertTrue(result);
 
-      // expect false: remaining tmp fails; priority would also fail, but the logic short-circuits when the tmp size check fails.
-      result = mFilter.filterTarget(this.getExecutorByName("Executor8"), dispatchingObj);
+    // expect false: remaining tmp fails; priority would also fail, but the logic short-circuits when the tmp size check fails.
+    result = mFilter.filterTarget(this.getExecutorByName("Executor8"), dispatchingObj);
       /*
       4 [main] INFO azkaban.executor.Selector.CandidateFilter  - start checking 'Executor8' with factor filter for 'Mockfilter'
       4 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredRemainingMemory] filter result : true
       4 [main] INFO azkaban.executor.Selector.CandidateFilter  - [Factor: requiredRemainingTmpSpace] filter result : false
       4 [main] INFO azkaban.executor.Selector.CandidateFilter  - Final checking result : false
       */
-      Assert.assertFalse(result);
+    Assert.assertFalse(result);
 
   }
 
   @Test
   public void testExecutorFilterWithNullInputs() throws Exception {
-    MockFilter filter = new MockFilter();
+    final MockFilter filter = new MockFilter();
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
     filter.registerFilterforRemainingTmpSpace();
     filter.registerFilterforTotalMemory();
     boolean result = false;
     try {
-        result = filter.filterTarget(this.getExecutorByName("Executor1"), null);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when null value is passed to the filter.");
-      }
+      result = filter.filterTarget(this.getExecutorByName("Executor1"), null);
+    } catch (final Exception ex) {
+      Assert.fail("no exception should be thrown when null value is passed to the filter.");
+    }
     // note : the FactorFilter logic will decide whether true or false should be returned when null value
     //        is passed, for the Mock class it returns false.
     Assert.assertFalse(result);
 
     try {
-        result = filter.filterTarget(null, null);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when null value is passed to the filter.");
-      }
+      result = filter.filterTarget(null, null);
+    } catch (final Exception ex) {
+      Assert.fail("no exception should be thrown when null value is passed to the filter.");
+    }
     // note : the FactorFilter logic will decide whether true or false should be returned when null value
     //        is passed, for the Mock class it returns false.
     Assert.assertFalse(result);
@@ -363,37 +184,37 @@ public class SelectorTest {
 
   @Test
   public void testExecutorComparer() throws Exception {
-    MockComparator comparator = new MockComparator();
+    final MockComparator comparator = new MockComparator();
     comparator.registerComparerForMemory(5);
 
     MockExecutorObject nextExecutor = Collections.max(this.executorList, comparator);
 
     // expect the first item to be selected, memory wise it is the max.
-    Assert.assertEquals(this.getExecutorByName("Executor11"),nextExecutor);
+    Assert.assertEquals(this.getExecutorByName("Executor11"), nextExecutor);
 
     // add the priority factor.
     // expect again the #9 item to be selected.
     comparator.registerComparerForPriority(6);
     nextExecutor = Collections.max(this.executorList, comparator);
-    Assert.assertEquals(this.getExecutorByName("Executor12"),nextExecutor);
+    Assert.assertEquals(this.getExecutorByName("Executor12"), nextExecutor);
 
     // add the remaining space factor.
     // expect the #12 item to be returned.
     comparator.registerComparerForRemainingSpace(3);
     nextExecutor = Collections.max(this.executorList, comparator);
-    Assert.assertEquals(this.getExecutorByName("Executor12"),nextExecutor);
+    Assert.assertEquals(this.getExecutorByName("Executor12"), nextExecutor);
   }
 
   @Test
   public void testExecutorComparerResisterComparerWInvalidWeight() throws Exception {
-    MockComparator comparator = new MockComparator();
+    final MockComparator comparator = new MockComparator();
     comparator.registerComparerForMemory(0);
   }
 
   @Test
   public void testSelector() throws Exception {
-    MockFilter filter = new MockFilter();
-    MockComparator comparator = new MockComparator();
+    final MockFilter filter = new MockFilter();
+    final MockComparator comparator = new MockComparator();
 
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
@@ -404,26 +225,27 @@ public class SelectorTest {
     comparator.registerComparerForPriority(5);
     comparator.registerComparerForRemainingSpace(3);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(filter, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        filter, comparator);
 
     // mock object, remaining memory 11500, total memory 3095, remainingTmpSpace 4200, priority 2.
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",3096, 1500,4200,2);
+    MockFlowObject dispatchingObj = new MockFlowObject("flow1", 3096, 1500, 4200, 2);
 
     // expected selection = #12
     MockExecutorObject nextExecutor = morkSelector.getBest(this.executorList, dispatchingObj);
-    Assert.assertEquals(this.getExecutorByName("Executor1"),nextExecutor);
+    Assert.assertEquals(this.getExecutorByName("Executor1"), nextExecutor);
 
-   // remaining memory 11500, total memory 3095, remainingTmpSpace 14200, priority 2.
-   dispatchingObj = new MockFlowObject("flow1",3096, 1500,14200,2);
-   // all candidates should be filtered by the remaining memory.
-   nextExecutor = morkSelector.getBest(this.executorList, dispatchingObj);
-   Assert.assertEquals(null,nextExecutor);
+    // remaining memory 11500, total memory 3095, remainingTmpSpace 14200, priority 2.
+    dispatchingObj = new MockFlowObject("flow1", 3096, 1500, 14200, 2);
+    // all candidates should be filtered by the remaining memory.
+    nextExecutor = morkSelector.getBest(this.executorList, dispatchingObj);
+    Assert.assertEquals(null, nextExecutor);
   }
 
   @Test
   public void testSelectorsignleCandidate() throws Exception {
-    MockFilter filter = new MockFilter();
-    MockComparator comparator = new MockComparator();
+    final MockFilter filter = new MockFilter();
+    final MockComparator comparator = new MockComparator();
 
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
@@ -434,13 +256,16 @@ public class SelectorTest {
     comparator.registerComparerForPriority(4);
     comparator.registerComparerForRemainingSpace(1);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(filter, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        filter, comparator);
 
-    ArrayList<MockExecutorObject> signleExecutorList = new ArrayList<>();
-    MockExecutorObject signleExecutor = new MockExecutorObject("ExecutorX",8080,50.0,2048,3,new Date(), 20, 6400);
+    final ArrayList<MockExecutorObject> signleExecutorList = new ArrayList<>();
+    final MockExecutorObject signleExecutor = new MockExecutorObject("ExecutorX", 8080, 50.0, 2048,
+        3,
+        new Date(), 20, 6400);
     signleExecutorList.add(signleExecutor);
 
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",100, 100,100,5);
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 100, 100, 100, 5);
     MockExecutorObject executor = morkSelector.getBest(signleExecutorList, dispatchingObj);
     // expected to see a null result, as the only executor is filtered out.
     Assert.assertTrue(null == executor);
@@ -453,8 +278,8 @@ public class SelectorTest {
 
   @Test
   public void testSelectorListWithItemsThatAreReferenceEqual() throws Exception {
-    MockFilter filter = new MockFilter();
-    MockComparator comparator = new MockComparator();
+    final MockFilter filter = new MockFilter();
+    final MockComparator comparator = new MockComparator();
 
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
@@ -465,21 +290,24 @@ public class SelectorTest {
     comparator.registerComparerForPriority(4);
     comparator.registerComparerForRemainingSpace(1);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(filter, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        filter, comparator);
 
-    ArrayList<MockExecutorObject> list = new ArrayList<>();
-    MockExecutorObject signleExecutor = new MockExecutorObject("ExecutorX",8080,50.0,2048,3,new Date(), 20, 6400);
+    final ArrayList<MockExecutorObject> list = new ArrayList<>();
+    final MockExecutorObject signleExecutor = new MockExecutorObject("ExecutorX", 8080, 50.0, 2048,
+        3,
+        new Date(), 20, 6400);
     list.add(signleExecutor);
     list.add(signleExecutor);
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",100, 100,100,3);
-    MockExecutorObject executor = morkSelector.getBest(list, dispatchingObj);
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 100, 100, 100, 3);
+    final MockExecutorObject executor = morkSelector.getBest(list, dispatchingObj);
     Assert.assertTrue(signleExecutor == executor);
   }
 
   @Test
   public void testSelectorListWithItemsThatAreEqualInValue() throws Exception {
-    MockFilter filter = new MockFilter();
-    MockComparator comparator = new MockComparator();
+    final MockFilter filter = new MockFilter();
+    final MockComparator comparator = new MockComparator();
 
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
@@ -490,19 +318,22 @@ public class SelectorTest {
     comparator.registerComparerForPriority(4);
     comparator.registerComparerForRemainingSpace(1);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(filter, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        filter, comparator);
 
     // note - as the tieBreaker set in the MockComparator uses the executor's name to make the
     //        final call, we need to set the names differently to make the test meaningful. In a real
     //        scenario we may want to use something else (say the hash code) as the bottom line for
     //        the tieBreaker's final decision. The purpose of this test is to prove that for two
     //        candidates with identical values (here, everything except the name) the decision is stable.
-    ArrayList<MockExecutorObject> list = new ArrayList<>();
-    MockExecutorObject executor1 = new MockExecutorObject("ExecutorX", 8080,50.0,2048,3,new Date(), 20, 6400);
-    MockExecutorObject executor2 = new MockExecutorObject("ExecutorX2",8080,50.0,2048,3,new Date(), 20, 6400);
+    final ArrayList<MockExecutorObject> list = new ArrayList<>();
+    final MockExecutorObject executor1 = new MockExecutorObject("ExecutorX", 8080, 50.0, 2048, 3,
+        new Date(), 20, 6400);
+    final MockExecutorObject executor2 = new MockExecutorObject("ExecutorX2", 8080, 50.0, 2048, 3,
+        new Date(), 20, 6400);
     list.add(executor1);
     list.add(executor2);
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",100, 100,100,3);
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 100, 100, 100, 3);
     MockExecutorObject executor = morkSelector.getBest(list, dispatchingObj);
     Assert.assertTrue(executor2 == executor);
 
@@ -515,8 +346,8 @@ public class SelectorTest {
 
   @Test
   public void testSelectorEmptyList() throws Exception {
-    MockFilter filter = new MockFilter();
-    MockComparator comparator = new MockComparator();
+    final MockFilter filter = new MockFilter();
+    final MockComparator comparator = new MockComparator();
 
     filter.registerFilterforPriority();
     filter.registerFilterforRemainingMemory();
@@ -527,58 +358,63 @@ public class SelectorTest {
     comparator.registerComparerForPriority(4);
     comparator.registerComparerForRemainingSpace(1);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(filter, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        filter, comparator);
 
-    ArrayList<MockExecutorObject> list = new ArrayList<>();
+    final ArrayList<MockExecutorObject> list = new ArrayList<>();
 
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",100, 100,100,5);
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 100, 100, 100, 5);
 
-    MockExecutorObject executor  = null;
+    MockExecutorObject executor = null;
 
     try {
       executor = morkSelector.getBest(list, dispatchingObj);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when an empty list is passed to the Selector.");
-      }
+    } catch (final Exception ex) {
+      Assert.fail("no exception should be thrown when an empty list is passed to the Selector.");
+    }
 
     // expected to see null result.
     Assert.assertTrue(null == executor);
 
     try {
       executor = morkSelector.getBest(list, dispatchingObj);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when null is passed to the Selector as the candidate list.");
-      }
+    } catch (final Exception ex) {
+      Assert.fail(
+          "no exception should be thrown when null is passed to the Selector as the candidate list.");
+    }
 
-      // expected to see null result, as the only executor is filtered out .
-      Assert.assertTrue(null == executor);
+    // expected to see a null result, as the candidate list is empty.
+    Assert.assertTrue(null == executor);
 
   }
 
   @Test
   public void testSelectorListWithNullValue() throws Exception {
-    MockComparator comparator = new MockComparator();
+    final MockComparator comparator = new MockComparator();
 
     comparator.registerComparerForMemory(3);
     comparator.registerComparerForPriority(4);
     comparator.registerComparerForRemainingSpace(1);
 
-    CandidateSelector<MockExecutorObject,MockFlowObject> morkSelector = new CandidateSelector<>(null, comparator);
+    final CandidateSelector<MockExecutorObject, MockFlowObject> morkSelector = new CandidateSelector<>(
+        null, comparator);
 
-    ArrayList<MockExecutorObject> list = new ArrayList<>();
-    MockExecutorObject executor1 = new MockExecutorObject("ExecutorX", 8080,50.0,2048,3,new Date(), 20, 6400);
-    MockExecutorObject executor2 = new MockExecutorObject("ExecutorX2",8080,50.0,2048,3,new Date(), 20, 6400);
+    final ArrayList<MockExecutorObject> list = new ArrayList<>();
+    final MockExecutorObject executor1 = new MockExecutorObject("ExecutorX", 8080, 50.0, 2048, 3,
+        new Date(), 20, 6400);
+    final MockExecutorObject executor2 = new MockExecutorObject("ExecutorX2", 8080, 50.0, 2048, 3,
+        new Date(), 20, 6400);
     list.add(executor1);
     list.add(executor2);
     list.add(null);
 
-    MockFlowObject  dispatchingObj = new MockFlowObject("flow1",100, 100,100,3);
-    MockExecutorObject executor  = null;
+    final MockFlowObject dispatchingObj = new MockFlowObject("flow1", 100, 100, 100, 3);
+    MockExecutorObject executor = null;
     try {
       executor = morkSelector.getBest(list, dispatchingObj);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when an List contains null value.");
-      }
+    } catch (final Exception ex) {
+      Assert.fail("no exception should be thrown when an List contains null value.");
+    }
     Assert.assertTrue(executor2 == executor);
 
     // try to compare null vs null, no exception is expected.
@@ -587,135 +423,351 @@ public class SelectorTest {
     list.add(null);
     try {
       executor = morkSelector.getBest(list, dispatchingObj);
-      } catch (Exception ex){
-        Assert.fail("no exception should be thrown when an List contains multiple null values.");
-      }
+    } catch (final Exception ex) {
+      Assert.fail("no exception should be thrown when an List contains multiple null values.");
+    }
     Assert.assertTrue(null == executor);
 
   }
 
   @Test
-  public void testCreatingExectorfilterObject() throws Exception{
-    List<String> validList = new ArrayList<>(ExecutorFilter.getAvailableFilterNames());
+  public void testCreatingExectorfilterObject() throws Exception {
+    final List<String> validList = new ArrayList<>(ExecutorFilter.getAvailableFilterNames());
     try {
       new ExecutorFilter(validList);
-    }catch (Exception ex){
-      Assert.fail("creating ExecutorFilter with valid list throws exception . ex -" + ex.getMessage());
+    } catch (final Exception ex) {
+      Assert.fail(
+          "creating ExecutorFilter with valid list throws exception . ex -" + ex.getMessage());
     }
   }
 
   @Test
-  public void testCreatingExectorfilterObjectWInvalidList() throws Exception{
-    List<String> invalidList = new ArrayList<>();
+  public void testCreatingExectorfilterObjectWInvalidList() throws Exception {
+    final List<String> invalidList = new ArrayList<>();
     invalidList.add("notExistingFilter");
     Exception result = null;
     try {
       new ExecutorFilter(invalidList);
-    }catch (Exception ex){
-      if (ex instanceof IllegalArgumentException)
-      result = ex;
+    } catch (final Exception ex) {
+      if (ex instanceof IllegalArgumentException) {
+        result = ex;
+      }
     }
     Assert.assertNotNull(result);
   }
 
   @Test
-  public void testCreatingExectorComparatorObject() throws Exception{
-   Map<String,Integer> comparatorMap = new HashMap<>();
-   for (String name : ExecutorComparator.getAvailableComparatorNames()){
-     comparatorMap.put(name, 1);
-   }
-   try {
+  public void testCreatingExectorComparatorObject() throws Exception {
+    final Map<String, Integer> comparatorMap = new HashMap<>();
+    for (final String name : ExecutorComparator.getAvailableComparatorNames()) {
+      comparatorMap.put(name, 1);
+    }
+    try {
       new ExecutorComparator(comparatorMap);
-    }catch (Exception ex){
-      Assert.fail("creating ExecutorComparator with valid list throws exception . ex -" + ex.getMessage());
+    } catch (final Exception ex) {
+      Assert.fail(
+          "creating ExecutorComparator with valid list throws exception . ex -" + ex.getMessage());
     }
   }
 
   @Test
-  public void testCreatingExectorComparatorObjectWInvalidName() throws Exception{
-    Map<String,Integer> comparatorMap = new HashMap<>();
+  public void testCreatingExectorComparatorObjectWInvalidName() throws Exception {
+    final Map<String, Integer> comparatorMap = new HashMap<>();
     comparatorMap.put("invalidName", 0);
     Exception result = null;
     try {
       new ExecutorComparator(comparatorMap);
-    }catch (Exception ex){
-      if (ex instanceof IllegalArgumentException)
-      result = ex;
+    } catch (final Exception ex) {
+      if (ex instanceof IllegalArgumentException) {
+        result = ex;
+      }
     }
     Assert.assertNotNull(result);
   }
 
   @Test
-  public void testCreatingExectorComparatorObjectWInvalidWeight() throws Exception{
-    Map<String,Integer> comparatorMap = new HashMap<>();
-    for (String name : ExecutorComparator.getAvailableComparatorNames()){
+  public void testCreatingExectorComparatorObjectWInvalidWeight() throws Exception {
+    final Map<String, Integer> comparatorMap = new HashMap<>();
+    for (final String name : ExecutorComparator.getAvailableComparatorNames()) {
       comparatorMap.put(name, -1);
     }
     Exception result = null;
     try {
       new ExecutorComparator(comparatorMap);
-    }catch (Exception ex){
-      if (ex instanceof IllegalArgumentException)
-      result = ex;
+    } catch (final Exception ex) {
+      if (ex instanceof IllegalArgumentException) {
+        result = ex;
+      }
     }
     Assert.assertNotNull(result);
   }
 
   @Test
-  public void testCreatingExecutorSelectorWithEmptyFilterComparatorList() throws Exception{
-    List<Executor> executorList = new ArrayList<>();
+  public void testCreatingExecutorSelectorWithEmptyFilterComparatorList() throws Exception {
+    final List<Executor> executorList = new ArrayList<>();
     executorList.add(new Executor(1, "host1", 80, true));
     executorList.add(new Executor(2, "host2", 80, true));
     executorList.add(new Executor(3, "host3", 80, true));
 
-    executorList.get(0).setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89, 0));
-    executorList.get(1).setExecutorInfo(new ExecutorInfo(50, 14095, 50, System.currentTimeMillis(), 90,  0));
-    executorList.get(2).setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 90,  0));
+    executorList.get(0)
+        .setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89, 0));
+    executorList.get(1)
+        .setExecutorInfo(new ExecutorInfo(50, 14095, 50, System.currentTimeMillis(), 90, 0));
+    executorList.get(2)
+        .setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 90, 0));
 
-    ExecutableFlow flow = new ExecutableFlow();
+    final ExecutableFlow flow = new ExecutableFlow();
 
-    ExecutorSelector selector = new ExecutorSelector(null , null);
-    Executor executor = selector.getBest(executorList, flow);
+    final ExecutorSelector selector = new ExecutorSelector(null, null);
+    final Executor executor = selector.getBest(executorList, flow);
     Assert.assertEquals(executorList.get(2), executor);
   }
 
-
   @Test
-  public void testExecutorSelectorE2E() throws Exception{
-    List<String> filterList = new ArrayList<>(ExecutorFilter.getAvailableFilterNames());
-    Map<String,Integer> comparatorMap;
+  public void testExecutorSelectorE2E() throws Exception {
+    final List<String> filterList = new ArrayList<>(ExecutorFilter.getAvailableFilterNames());
+    final Map<String, Integer> comparatorMap;
     comparatorMap = new HashMap<>();
-    List<Executor> executorList = new ArrayList<>();
+    final List<Executor> executorList = new ArrayList<>();
     executorList.add(new Executor(1, "host1", 80, true));
     executorList.add(new Executor(2, "host2", 80, true));
     executorList.add(new Executor(3, "host3", 80, true));
 
-    executorList.get(0).setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89, 0));
-    executorList.get(1).setExecutorInfo(new ExecutorInfo(50, 14095, 50, System.currentTimeMillis(), 90,  0));
-    executorList.get(2).setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 90,  0));
+    executorList.get(0)
+        .setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89, 0));
+    executorList.get(1)
+        .setExecutorInfo(new ExecutorInfo(50, 14095, 50, System.currentTimeMillis(), 90, 0));
+    executorList.get(2)
+        .setExecutorInfo(new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 90, 0));
 
-    ExecutableFlow flow = new ExecutableFlow();
+    final ExecutableFlow flow = new ExecutableFlow();
 
-    for (String name : ExecutorComparator.getAvailableComparatorNames()){
+    for (final String name : ExecutorComparator.getAvailableComparatorNames()) {
       comparatorMap.put(name, 1);
     }
-    ExecutorSelector selector = new ExecutorSelector(filterList,comparatorMap);
+    final ExecutorSelector selector = new ExecutorSelector(filterList, comparatorMap);
     Executor executor = selector.getBest(executorList, flow);
     Assert.assertEquals(executorList.get(0), executor);
 
     // simulate that once the flow is assigned, executor1's remaining TMP storage dropped to 2048
     // now, when we call getBest again, executor3 is expected to be selected as it has an earlier last dispatched time.
-    executorList.get(0).setExecutorInfo(new ExecutorInfo(99.9, 4095, 50, System.currentTimeMillis(), 90, 1));
+    executorList.get(0)
+        .setExecutorInfo(new ExecutorInfo(99.9, 4095, 50, System.currentTimeMillis(), 90, 1));
     executor = selector.getBest(executorList, flow);
     Assert.assertEquals(executorList.get(2), executor);
   }
 
   @Test
-  public void  testExecutorInfoJsonParser() throws Exception{
-    ExecutorInfo exeInfo = new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89, 10);
-    String json = JSONUtils.toJSON(exeInfo);
-    ExecutorInfo exeInfo2 = ExecutorInfo.fromJSONString(json);
+  public void testExecutorInfoJsonParser() throws Exception {
+    final ExecutorInfo exeInfo = new ExecutorInfo(99.9, 14095, 50, System.currentTimeMillis(), 89,
+        10);
+    final String json = JSONUtils.toJSON(exeInfo);
+    final ExecutorInfo exeInfo2 = ExecutorInfo.fromJSONString(json);
     Assert.assertTrue(exeInfo.equals(exeInfo2));
   }
 
+  // mock executor object.
+  static class MockExecutorObject implements Comparable<MockExecutorObject> {
+
+    public String name;
+    public int port;
+    public double percentOfRemainingMemory;
+    public int amountOfRemainingMemory;
+    public int priority;
+    public Date lastAssigned;
+    public double percentOfRemainingFlowcapacity;
+    public int remainingTmp;
+
+    public MockExecutorObject(final String name,
+        final int port,
+        final double percentOfRemainingMemory,
+        final int amountOfRemainingMemory,
+        final int priority,
+        final Date lastAssigned,
+        final double percentOfRemainingFlowcapacity,
+        final int remainingTmp) {
+      this.name = name;
+      this.port = port;
+      this.percentOfRemainingMemory = percentOfRemainingMemory;
+      this.amountOfRemainingMemory = amountOfRemainingMemory;
+      this.priority = priority;
+      this.lastAssigned = lastAssigned;
+      this.percentOfRemainingFlowcapacity = percentOfRemainingFlowcapacity;
+      this.remainingTmp = remainingTmp;
+    }
+
+    @Override
+    public String toString() {
+      return this.name;
+    }
+
+    @Override
+    public int compareTo(final MockExecutorObject o) {
+      return null == o ? 1 : this.hashCode() - o.hashCode();
+    }
+  }
+
+  // Mock flow object.
+  static class MockFlowObject {
+
+    public String name;
+    public int requiredRemainingMemory;
+    public int requiredTotalMemory;
+    public int requiredRemainingTmpSpace;
+    public int priority;
+
+    public MockFlowObject(final String name,
+        final int requiredTotalMemory,
+        final int requiredRemainingMemory,
+        final int requiredRemainingTmpSpace,
+        final int priority) {
+      this.name = name;
+      this.requiredTotalMemory = requiredTotalMemory;
+      this.requiredRemainingMemory = requiredRemainingMemory;
+      this.requiredRemainingTmpSpace = requiredRemainingTmpSpace;
+      this.priority = priority;
+    }
+
+    @Override
+    public String toString() {
+      return this.name;
+    }
+  }
+
+  // mock Filter class.
+  static class MockFilter
+      extends CandidateFilter<MockExecutorObject, MockFlowObject> {
+
+    public MockFilter() {
+    }
+
+    @Override
+    public String getName() {
+      return "Mockfilter";
+    }
+
+    // function to register the totalMemory filter.
+    // for test purposes the registration is put in a separate method; in production the work
+    // should be done in the constructor.
+    public void registerFilterforTotalMemory() {
+      this.registerFactorFilter(
+          FactorFilter.create("requiredTotalMemory", (itemToCheck, sourceObject) -> {
+            // REAL LOGIC COMES HERE -
+            if (null == itemToCheck || null == sourceObject) {
+              return false;
+            }
+
+            // Box has infinite memory.:)
+            if (itemToCheck.percentOfRemainingMemory == 0) {
+              return true;
+            }
+
+            // calculate the memory and return.
+            return itemToCheck.amountOfRemainingMemory / itemToCheck.percentOfRemainingMemory * 100
+                >
+                sourceObject.requiredTotalMemory;
+          }));
+    }
+
+    public void registerFilterforRemainingMemory() {
+      this.registerFactorFilter(
+          FactorFilter.create("requiredRemainingMemory", (itemToCheck, sourceObject) -> {
+            // REAL LOGIC COMES HERE -
+            if (null == itemToCheck || null == sourceObject) {
+              return false;
+            }
+            return itemToCheck.amountOfRemainingMemory > sourceObject.requiredRemainingMemory;
+          }));
+    }
+
+    public void registerFilterforPriority() {
+      this.registerFactorFilter(
+          FactorFilter.create("requiredProprity", (itemToCheck, sourceObject) -> {
+            // REAL LOGIC COMES HERE -
+            if (null == itemToCheck || null == sourceObject) {
+              return false;
+            }
+
+            // priority value - the bigger the value, the lower the priority.
+            return itemToCheck.priority >= sourceObject.priority;
+          }));
+    }
+
+    public void registerFilterforRemainingTmpSpace() {
+      this.registerFactorFilter(
+          FactorFilter.create("requiredRemainingTmpSpace", (itemToCheck, sourceObject) -> {
+            // REAL LOGIC COMES HERE -
+            if (null == itemToCheck || null == sourceObject) {
+              return false;
+            }
+
+            return itemToCheck.remainingTmp > sourceObject.requiredRemainingTmpSpace;
+          }));
+    }
+
+  }
+
+  // mock comparator class.
+  static class MockComparator
+      extends CandidateComparator<MockExecutorObject> {
+
+    public MockComparator() {
+    }
+
+    @Override
+    public String getName() {
+      return "MockComparator";
+    }
+
+    @Override
+    protected boolean tieBreak(final MockExecutorObject object1, final MockExecutorObject object2) {
+      if (null == object2) {
+        return true;
+      }
+      if (null == object1) {
+        return false;
+      }
+      return object1.name.compareTo(object2.name) >= 0;
+    }
+
+    public void registerComparerForMemory(final int weight) {
+      this.registerFactorComparator(FactorComparator.create("Memory", weight, (o1, o2) -> {
+        int result = 0;
+
+        // check remaining amount of memory.
+        result = o1.amountOfRemainingMemory - o2.amountOfRemainingMemory;
+        if (result != 0) {
+          return result > 0 ? 1 : -1;
+        }
+
+        // check remaining % .
+        result = (int) (o1.percentOfRemainingMemory - o2.percentOfRemainingMemory);
+        return result == 0 ? 0 : result > 0 ? 1 : -1;
+
+      }));
+    }
+
+    public void registerComparerForRemainingSpace(final int weight) {
+      this.registerFactorComparator(FactorComparator.create("RemainingTmp", weight, (o1, o2) -> {
+        int result = 0;
+
+        // check remaining % .
+        result = (int) (o1.remainingTmp - o2.remainingTmp);
+        return result == 0 ? 0 : result > 0 ? 1 : -1;
+
+      }));
+    }
+
+    public void registerComparerForPriority(final int weight) {
+      this.registerFactorComparator(FactorComparator.create("Priority", weight, (o1, o2) -> {
+        int result = 0;
+
+        // check priority; the bigger, the better.
+        result = (int) (o1.priority - o2.priority);
+        return result == 0 ? 0 : result > 0 ? 1 : -1;
+
+      }));
+    }
+  }
+
 }
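
The SelectorTest changes above exercise Azkaban's pluggable dispatch logic: a CandidateFilter holds named FactorFilter predicates, a CandidateComparator holds weighted FactorComparator instances plus a tie-breaker, and a CandidateSelector combines the two and picks the best candidate via getBest(). Below is a minimal sketch of that wiring outside the test, using only the calls visible in the diff; the Host/Work types and the HostSelectorDemo class are illustrative, and the imports assume the selector classes live in azkaban.executor.selector inside azkaban-common.

import azkaban.executor.selector.CandidateComparator;
import azkaban.executor.selector.CandidateFilter;
import azkaban.executor.selector.CandidateSelector;
import azkaban.executor.selector.FactorComparator;
import azkaban.executor.selector.FactorFilter;
import java.util.Arrays;
import java.util.List;

public class HostSelectorDemo {

  // Toy candidate/flow types; the tests use MockExecutorObject and MockFlowObject instead.
  static class Host {
    final String name;
    final int freeMemoryMb;
    Host(final String name, final int freeMemoryMb) {
      this.name = name;
      this.freeMemoryMb = freeMemoryMb;
    }
  }

  static class Work {
    final int requiredMemoryMb;
    Work(final int requiredMemoryMb) {
      this.requiredMemoryMb = requiredMemoryMb;
    }
  }

  // Filter: reject candidates that cannot satisfy the request (nulls are rejected, as in MockFilter).
  static class MemoryFilter extends CandidateFilter<Host, Work> {
    MemoryFilter() {
      registerFactorFilter(FactorFilter.create("requiredMemory",
          (host, work) -> host != null && work != null && host.freeMemoryMb > work.requiredMemoryMb));
    }

    @Override
    public String getName() {
      return "MemoryFilter";
    }
  }

  // Comparator: one weighted factor plus a stable name-based tie-breaker, as in MockComparator.
  static class MemoryComparator extends CandidateComparator<Host> {
    MemoryComparator() {
      registerFactorComparator(FactorComparator.create("Memory", 1,
          (h1, h2) -> Integer.compare(h1.freeMemoryMb, h2.freeMemoryMb)));
    }

    @Override
    public String getName() {
      return "MemoryComparator";
    }

    @Override
    protected boolean tieBreak(final Host h1, final Host h2) {
      if (h2 == null) {
        return true;
      }
      if (h1 == null) {
        return false;
      }
      return h1.name.compareTo(h2.name) >= 0;
    }
  }

  public static void main(final String[] args) {
    final List<Host> hosts = Arrays.asList(new Host("small", 2048), new Host("big", 8192));
    final CandidateSelector<Host, Work> selector =
        new CandidateSelector<>(new MemoryFilter(), new MemoryComparator());
    final Host best = selector.getBest(hosts, new Work(1024));
    System.out.println(best == null ? "no candidate survived the filter" : best.name); // "big"
  }
}

The E2E tests in the same file show the production equivalents: ExecutorFilter is built from a list of filter names, ExecutorComparator from a name-to-weight map, and ExecutorSelector combines them for real Executor/ExecutableFlow objects.
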
diff --git a/azkaban-common/src/test/java/azkaban/executor/SleepJavaJob.java b/azkaban-common/src/test/java/azkaban/executor/SleepJavaJob.java
index 6b447dc..15a4b00 100644
--- a/azkaban-common/src/test/java/azkaban/executor/SleepJavaJob.java
+++ b/azkaban-common/src/test/java/azkaban/executor/SleepJavaJob.java
@@ -22,78 +22,79 @@ import java.util.Map;
 import java.util.Properties;
 
 public class SleepJavaJob {
+
   private boolean fail;
   private String seconds;
   private int attempts;
   private int currentAttempt;
 
-  public SleepJavaJob(String id, Properties props) {
+  public SleepJavaJob(final String id, final Properties props) {
     setup(props);
   }
 
-  public SleepJavaJob(String id, Map<String, String> parameters) {
-    Properties properties = new Properties();
+  public SleepJavaJob(final String id, final Map<String, String> parameters) {
+    final Properties properties = new Properties();
     properties.putAll(parameters);
 
     setup(properties);
   }
 
-  private void setup(Properties props) {
-    String failStr = (String) props.get("fail");
+  public static void main(final String[] args) throws Exception {
+    final String propsFile = System.getenv("JOB_PROP_FILE");
+    final Properties prop = new Properties();
+    prop.load(new BufferedReader(new FileReader(propsFile)));
+
+    final String jobName = System.getenv("JOB_NAME");
+    final SleepJavaJob job = new SleepJavaJob(jobName, prop);
+
+    job.run();
+  }
+
+  private void setup(final Properties props) {
+    final String failStr = (String) props.get("fail");
 
     if (failStr == null || failStr.equals("false")) {
-      fail = false;
+      this.fail = false;
     } else {
-      fail = true;
+      this.fail = true;
     }
 
-    currentAttempt =
+    this.currentAttempt =
         props.containsKey("azkaban.job.attempt") ? Integer
             .parseInt((String) props.get("azkaban.job.attempt")) : 0;
-    String attemptString = (String) props.get("passRetry");
+    final String attemptString = (String) props.get("passRetry");
     if (attemptString == null) {
-      attempts = -1;
+      this.attempts = -1;
     } else {
-      attempts = Integer.valueOf(attemptString);
+      this.attempts = Integer.valueOf(attemptString);
     }
-    seconds = (String) props.get("seconds");
+    this.seconds = (String) props.get("seconds");
 
-    if (fail) {
-      System.out.println("Planning to fail after " + seconds
-          + " seconds. Attempts left " + currentAttempt + " of " + attempts);
+    if (this.fail) {
+      System.out.println("Planning to fail after " + this.seconds
+          + " seconds. Attempts left " + this.currentAttempt + " of " + this.attempts);
     } else {
-      System.out.println("Planning to succeed after " + seconds + " seconds.");
+      System.out.println("Planning to succeed after " + this.seconds + " seconds.");
     }
   }
 
-  public static void main(String[] args) throws Exception {
-    String propsFile = System.getenv("JOB_PROP_FILE");
-    Properties prop = new Properties();
-    prop.load(new BufferedReader(new FileReader(propsFile)));
-
-    String jobName = System.getenv("JOB_NAME");
-    SleepJavaJob job = new SleepJavaJob(jobName, prop);
-
-    job.run();
-  }
-
   public void run() throws Exception {
-    if (seconds == null) {
+    if (this.seconds == null) {
       throw new RuntimeException("Seconds not set");
     }
 
-    int sec = Integer.parseInt(seconds);
+    final int sec = Integer.parseInt(this.seconds);
     System.out.println("Sec " + sec);
     synchronized (this) {
       try {
         this.wait(sec * 1000);
-      } catch (InterruptedException e) {
-        System.out.println("Interrupted " + fail);
+      } catch (final InterruptedException e) {
+        System.out.println("Interrupted " + this.fail);
       }
     }
 
-    if (fail) {
-      if (attempts <= 0 || currentAttempt <= attempts) {
+    if (this.fail) {
+      if (this.attempts <= 0 || this.currentAttempt <= this.attempts) {
         throw new Exception("I failed because I had to.");
       }
     }
@@ -101,7 +102,7 @@ public class SleepJavaJob {
 
   public void cancel() throws Exception {
     System.out.println("Cancelled called on Sleep job");
-    fail = true;
+    this.fail = true;
     synchronized (this) {
       this.notifyAll();
     }
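
As the reordered file above makes clearer, SleepJavaJob is driven entirely by job properties: "seconds" controls the sleep, "fail" together with "passRetry" and "azkaban.job.attempt" controls whether run() throws, and main() loads the same properties from the JOB_PROP_FILE path Azkaban provides. A minimal sketch of invoking it directly (it lives in the test sources, so this assumes the test classpath; the class name and values are placeholders):

import azkaban.executor.SleepJavaJob;
import java.util.HashMap;
import java.util.Map;

public class SleepJavaJobDemo {

  public static void main(final String[] args) throws Exception {
    final Map<String, String> params = new HashMap<>();
    params.put("seconds", "2");  // required: run() throws "Seconds not set" without it
    params.put("fail", "false"); // "false" (or absent) means the job succeeds after sleeping
    // params.put("passRetry", "1");           // optional: a failing job passes once the attempt count exceeds this
    // params.put("azkaban.job.attempt", "0"); // normally injected by Azkaban

    final SleepJavaJob job = new SleepJavaJob("demo-sleep", params);
    job.run(); // blocks for roughly two seconds; cancel() from another thread would wake it early and make it fail
  }
}
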
diff --git a/azkaban-common/src/test/java/azkaban/jobcallback/JobCallbackValidatorTest.java b/azkaban-common/src/test/java/azkaban/jobcallback/JobCallbackValidatorTest.java
index 9741b6a..857d04b 100644
--- a/azkaban-common/src/test/java/azkaban/jobcallback/JobCallbackValidatorTest.java
+++ b/azkaban-common/src/test/java/azkaban/jobcallback/JobCallbackValidatorTest.java
@@ -4,143 +4,142 @@ import static azkaban.jobcallback.JobCallbackConstants.DEFAULT_MAX_CALLBACK_COUN
 import static azkaban.jobcallback.JobCallbackConstants.MAX_CALLBACK_COUNT_PROPERTY_KEY;
 import static azkaban.jobcallback.JobCallbackConstants.MAX_POST_BODY_LENGTH_PROPERTY_KEY;
 
+import azkaban.utils.Props;
 import java.util.HashSet;
 import java.util.Set;
-
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import azkaban.utils.Props;
-
 public class JobCallbackValidatorTest {
+
   private Props serverProps;
 
   @Before
   public void setup() {
-    serverProps = new Props();
-    serverProps
+    this.serverProps = new Props();
+    this.serverProps
         .put(MAX_CALLBACK_COUNT_PROPERTY_KEY, DEFAULT_MAX_CALLBACK_COUNT);
   }
 
   @Test
   public void noJobCallbackProps() {
-    Props jobProps = new Props();
-    Set<String> errors = new HashSet<String>();
+    final Props jobProps = new Props();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(0, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
 
   @Test
   public void sequenceStartWithZeroProps() {
-    Props jobProps = new Props();
-    Set<String> errors = new HashSet<String>();
+    final Props jobProps = new Props();
+    final Set<String> errors = new HashSet<>();
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".0.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".0.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.COMPLETED.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.COMPLETED.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     Assert.assertEquals(1, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(1, errors.size());
   }
 
   @Test
   public void oneGetJobCallback() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(1, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
 
   @Test
   public void onePostJobCallback() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
         JobCallbackConstants.HTTP_POST);
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
         "doc:id");
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(1, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
 
   @Test
   public void multiplePostJobCallbacks() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
         JobCallbackConstants.HTTP_POST);
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
         "doc:id");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.url",
         "http://www.linkedin2.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.method",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.method",
         JobCallbackConstants.HTTP_POST);
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.body",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.body",
         "doc2:id");
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(2, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
 
   @Test
   public void noPostBodyJobCallback() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
         JobCallbackConstants.HTTP_POST);
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(0, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(1, errors.size());
     System.out.println(errors);
@@ -148,42 +147,42 @@ public class JobCallbackValidatorTest {
 
   @Test
   public void multipleGetJobCallbacks() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(2, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
 
   @Test
   public void multipleGetJobCallbackWithGap() {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".2.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".2.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".2.url",
         "http://www.linkedin.com");
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(2, JobCallbackValidator.validate("bogusJob",
-        serverProps, jobProps, errors));
+        this.serverProps, jobProps, errors));
 
     Assert.assertEquals(0, errors.size());
   }
@@ -191,27 +190,27 @@ public class JobCallbackValidatorTest {
   @Test
   public void postBodyLengthTooLargeTest() {
 
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.url",
         "http://www.linkedin.com");
 
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.method",
         JobCallbackConstants.HTTP_POST);
 
-    String postBodyValue = "abcdefghijklmnopqrstuvwxyz";
+    final String postBodyValue = "abcdefghijklmnopqrstuvwxyz";
 
-    int postBodyLength = 20;
+    final int postBodyLength = 20;
     Assert.assertTrue(postBodyValue.length() > postBodyLength);
     jobProps.put("job.notification."
-        + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
+            + JobCallbackStatusEnum.FAILURE.name().toLowerCase() + ".1.body",
         postBodyValue);
 
-    Props localServerProps = new Props();
+    final Props localServerProps = new Props();
     localServerProps.put(MAX_POST_BODY_LENGTH_PROPERTY_KEY, postBodyLength);
 
-    Set<String> errors = new HashSet<String>();
+    final Set<String> errors = new HashSet<>();
 
     Assert.assertEquals(0, JobCallbackValidator.validate("bogusJob",
         localServerProps, jobProps, errors));
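
The validator tests above document the callback property scheme: keys follow job.notification.<status>.<sequence>.url/.method/.body, sequence numbers start at 1, POST callbacks additionally need a body, and the server Props cap the callback count and POST body length. A small sketch of validating such a configuration, written as if it sat in the azkaban.jobcallback package so it can use the same constants the test does; the job name and URLs are placeholders.

package azkaban.jobcallback;

import azkaban.utils.Props;
import java.util.HashSet;
import java.util.Set;

public class JobCallbackValidationDemo {

  public static void main(final String[] args) {
    final Props serverProps = new Props();
    serverProps.put(JobCallbackConstants.MAX_CALLBACK_COUNT_PROPERTY_KEY,
        JobCallbackConstants.DEFAULT_MAX_CALLBACK_COUNT);

    final Props jobProps = new Props();
    // a GET callback on failure (sequence numbers start at 1, not 0) ...
    jobProps.put("job.notification.failure.1.url", "http://example.com/on-failure");
    // ... and a POST callback on completion, which additionally requires a body.
    jobProps.put("job.notification.completed.1.url", "http://example.com/on-completed");
    jobProps.put("job.notification.completed.1.method", JobCallbackConstants.HTTP_POST);
    jobProps.put("job.notification.completed.1.body", "doc:id");

    final Set<String> errors = new HashSet<>();
    final int validCount = JobCallbackValidator.validate("demoJob", serverProps, jobProps, errors);
    System.out.println(validCount + " valid callback(s); errors: " + errors);
  }
}
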
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/AllJobExecutorTests.java b/azkaban-common/src/test/java/azkaban/jobExecutor/AllJobExecutorTests.java
index 6a204ea..f3b5ea6 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/AllJobExecutorTests.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/AllJobExecutorTests.java
@@ -21,9 +21,9 @@ import azkaban.utils.Props;
 
 class AllJobExecutorTests {
 
-  static Props setUpCommonProps(){
+  static Props setUpCommonProps() {
 
-    Props props = new Props();
+    final Props props = new Props();
     props.put("fullPath", ".");
     props.put(CommonJobProperties.PROJECT_NAME, "test_project");
     props.put(CommonJobProperties.FLOW_ID, "test_flow");
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/JavaProcessJobTest.java b/azkaban-common/src/test/java/azkaban/jobExecutor/JavaProcessJobTest.java
index a6f27f4..5a3fd43 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/JavaProcessJobTest.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/JavaProcessJobTest.java
@@ -34,17 +34,6 @@ import org.junit.rules.TemporaryFolder;
 
 
 public class JavaProcessJobTest {
-  @ClassRule
-  public static TemporaryFolder classTemp = new TemporaryFolder();
-
-  @Rule
-  public TemporaryFolder temp = new TemporaryFolder();
-
-  private JavaProcessJob job = null;
-  private Props props = null;
-  private Logger log = Logger.getLogger(JavaProcessJob.class);
-
-  private static String classPaths;
 
   private static final String inputContent =
       "Quick Change in Strategy for a Bookseller \n"
@@ -54,7 +43,6 @@ public class JavaProcessJobTest {
           + "Twelve years later, it may be Joe Fox's turn to worry. Readers have gone from skipping small \n"
           + "bookstores to wondering if they need bookstores at all. More people are ordering books online  \n"
           + "or plucking them from the best-seller bin at Wal-Mart";
-
   private static final String errorInputContent =
       inputContent
           + "\n stop_here "
@@ -64,20 +52,27 @@ public class JavaProcessJobTest {
           + "to the Association of American Publishers, spurred by sales of the Amazon Kindle and the new Apple iPad. \n"
           + "For Barnes & Noble, long the largest and most powerful bookstore chain in the country, the new competition \n"
           + "has led to declining profits and store traffic.";
-
+  @ClassRule
+  public static TemporaryFolder classTemp = new TemporaryFolder();
+  private static String classPaths;
   private static String inputFile;
   private static String errorInputFile;
   private static String outputFile;
+  private final Logger log = Logger.getLogger(JavaProcessJob.class);
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+  private JavaProcessJob job = null;
+  private Props props = null;
 
   @BeforeClass
   public static void init() throws IOException {
     azkaban.test.Utils.initServiceProvider();
     // Get the classpath
-    Properties prop = System.getProperties();
+    final Properties prop = System.getProperties();
     classPaths =
         String.format("'%s'", prop.getProperty("java.class.path", null));
 
-    long time = (new Date()).getTime();
+    final long time = (new Date()).getTime();
     inputFile = classTemp.newFile("azkaban_input_" + time).getCanonicalPath();
     errorInputFile =
         classTemp.newFile("azkaban_input_error_" + time).getCanonicalPath();
@@ -87,7 +82,7 @@ public class JavaProcessJobTest {
     try {
       Utils.dumpFile(inputFile, inputContent);
       Utils.dumpFile(errorInputFile, errorInputContent);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       e.printStackTrace(System.err);
       Assert.fail("error in creating input file:" + e.getLocalizedMessage());
     }
@@ -100,55 +95,55 @@ public class JavaProcessJobTest {
 
   @Before
   public void setUp() throws IOException {
-    File workingDir = temp.newFolder("testJavaProcess");
+    final File workingDir = this.temp.newFolder("testJavaProcess");
 
     // Initialize job
-    props = AllJobExecutorTests.setUpCommonProps();
-    props.put(AbstractProcessJob.WORKING_DIR, workingDir.getCanonicalPath());
-    props.put("type", "java");
+    this.props = AllJobExecutorTests.setUpCommonProps();
+    this.props.put(AbstractProcessJob.WORKING_DIR, workingDir.getCanonicalPath());
+    this.props.put("type", "java");
 
-    job = new JavaProcessJob("testJavaProcess", props, props, log);
+    this.job = new JavaProcessJob("testJavaProcess", this.props, this.props, this.log);
   }
 
   @After
   public void tearDown() {
-    temp.delete();
+    this.temp.delete();
   }
 
   @Test
   public void testJavaJob() throws Exception {
     // initialize the Props
-    props.put(JavaProcessJob.JAVA_CLASS,
+    this.props.put(JavaProcessJob.JAVA_CLASS,
         "azkaban.jobExecutor.WordCountLocal");
-    props.put("input", inputFile);
-    props.put("output", outputFile);
-    props.put("classpath", classPaths);
-    job.run();
+    this.props.put("input", inputFile);
+    this.props.put("output", outputFile);
+    this.props.put("classpath", classPaths);
+    this.job.run();
   }
 
   @Test
   public void testJavaJobHashmap() throws Exception {
     // initialize the Props
-    props.put(JavaProcessJob.JAVA_CLASS,
+    this.props.put(JavaProcessJob.JAVA_CLASS,
         "azkaban.executor.SleepJavaJob");
-    props.put("seconds", 1);
-    props.put("input", inputFile);
-    props.put("output", outputFile);
-    props.put("classpath", classPaths);
-    job.run();
+    this.props.put("seconds", 1);
+    this.props.put("input", inputFile);
+    this.props.put("output", outputFile);
+    this.props.put("classpath", classPaths);
+    this.job.run();
   }
 
   @Test
   public void testFailedJavaJob() throws Exception {
-    props.put(JavaProcessJob.JAVA_CLASS,
+    this.props.put(JavaProcessJob.JAVA_CLASS,
         "azkaban.jobExecutor.WordCountLocal");
-    props.put("input", errorInputFile);
-    props.put("output", outputFile);
-    props.put("classpath", classPaths);
+    this.props.put("input", errorInputFile);
+    this.props.put("output", outputFile);
+    this.props.put("classpath", classPaths);
 
     try {
-      job.run();
-    } catch (RuntimeException e) {
+      this.job.run();
+    } catch (final RuntimeException e) {
       Assert.assertTrue(true);
     }
   }
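
The test above shows that a JavaProcessJob is configured purely through Props: JavaProcessJob.JAVA_CLASS names the main class, "classpath" supplies the child JVM's classpath, and job props such as "seconds" end up visible to the launched class (SleepJavaJob reads them from the JOB_PROP_FILE Azkaban writes). A hedged sketch of the same wiring outside JUnit; it is written as if it lived next to the tests (package azkaban.jobExecutor) so it can reuse the package-private AllJobExecutorTests.setUpCommonProps() helper, and it assumes azkaban.test.Utils.initServiceProvider() is available as in the test's @BeforeClass.

package azkaban.jobExecutor;

import azkaban.utils.Props;
import org.apache.log4j.Logger;

public class JavaProcessJobDemo {

  public static void main(final String[] args) throws Exception {
    azkaban.test.Utils.initServiceProvider(); // same bootstrap the test performs

    final Props props = AllJobExecutorTests.setUpCommonProps();
    props.put(AbstractProcessJob.WORKING_DIR, ".");
    props.put("type", "java");
    props.put(JavaProcessJob.JAVA_CLASS, "azkaban.executor.SleepJavaJob"); // main class to launch
    props.put("seconds", 1); // reaches SleepJavaJob through the generated props file
    props.put("classpath", String.format("'%s'", System.getProperty("java.class.path")));

    final JavaProcessJob job = new JavaProcessJob("demoJavaProcess", props, props,
        Logger.getLogger(JavaProcessJob.class));
    job.run(); // spawns a child JVM that runs SleepJavaJob for one second
  }
}
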
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/ProcessJobTest.java b/azkaban-common/src/test/java/azkaban/jobExecutor/ProcessJobTest.java
index 46ed2fe..e6eb5e8 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/ProcessJobTest.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/ProcessJobTest.java
@@ -31,12 +31,12 @@ import org.junit.rules.TemporaryFolder;
 
 
 public class ProcessJobTest {
+
+  private final Logger log = Logger.getLogger(ProcessJob.class);
   @Rule
   public TemporaryFolder temp = new TemporaryFolder();
-
   private ProcessJob job = null;
   private Props props = null;
-  private Logger log = Logger.getLogger(ProcessJob.class);
 
   @BeforeClass
   public static void classInit() throws Exception {
@@ -46,68 +46,66 @@ public class ProcessJobTest {
 
   @Before
   public void setUp() throws IOException {
-    File workingDir = temp.newFolder("TestProcess");
+    final File workingDir = this.temp.newFolder("TestProcess");
 
     // Initialize job
-    props = AllJobExecutorTests.setUpCommonProps();
-    props.put(AbstractProcessJob.WORKING_DIR, workingDir.getCanonicalPath());
-    props.put("type", "command");
+    this.props = AllJobExecutorTests.setUpCommonProps();
+    this.props.put(AbstractProcessJob.WORKING_DIR, workingDir.getCanonicalPath());
+    this.props.put("type", "command");
 
-    job = new ProcessJob("TestProcess", props, props, log);
+    this.job = new ProcessJob("TestProcess", this.props, this.props, this.log);
   }
 
   @After
   public void tearDown() {
-    temp.delete();
+    this.temp.delete();
   }
 
   @Test
   public void testOneUnixCommand() throws Exception {
     // Initialize the Props
-    props.put(ProcessJob.COMMAND, "ls -al");
-    job.run();
+    this.props.put(ProcessJob.COMMAND, "ls -al");
+    this.job.run();
 
   }
 
   /**
    * this job should run fine if the props contain user.to.proxy
-   * @throws Exception
    */
   @Test
   public void testOneUnixCommandWithProxyUserInsteadOfSubmitUser() throws Exception {
 
     // Initialize the Props
-    props.removeLocal(CommonJobProperties.SUBMIT_USER);
-    props.put("user.to.proxy", "test_user");
-    props.put(ProcessJob.COMMAND, "ls -al");
+    this.props.removeLocal(CommonJobProperties.SUBMIT_USER);
+    this.props.put("user.to.proxy", "test_user");
+    this.props.put(ProcessJob.COMMAND, "ls -al");
 
-    job.run();
+    this.job.run();
 
   }
 
   /**
    * this job should fail because there is no user.to.proxy and no CommonJobProperties.SUBMIT_USER
-   * @throws Exception
    */
-  @Test (expected=RuntimeException.class)
+  @Test(expected = RuntimeException.class)
   public void testOneUnixCommandWithNoUser() throws Exception {
 
     // Initialize the Props
-    props.removeLocal(CommonJobProperties.SUBMIT_USER);
-    props.put(ProcessJob.COMMAND, "ls -al");
+    this.props.removeLocal(CommonJobProperties.SUBMIT_USER);
+    this.props.put(ProcessJob.COMMAND, "ls -al");
 
-    job.run();
+    this.job.run();
 
   }
 
   @Test
   public void testFailedUnixCommand() throws Exception {
     // Initialize the Props
-    props.put(ProcessJob.COMMAND, "xls -al");
+    this.props.put(ProcessJob.COMMAND, "xls -al");
 
     try {
-      job.run();
-    } catch (RuntimeException e) {
+      this.job.run();
+    } catch (final RuntimeException e) {
       Assert.assertTrue(true);
       e.printStackTrace();
     }
@@ -116,26 +114,26 @@ public class ProcessJobTest {
   @Test
   public void testMultipleUnixCommands() throws Exception {
     // Initialize the Props
-    props.put(ProcessJob.COMMAND, "pwd");
-    props.put("command.1", "date");
-    props.put("command.2", "whoami");
+    this.props.put(ProcessJob.COMMAND, "pwd");
+    this.props.put("command.1", "date");
+    this.props.put("command.2", "whoami");
 
-    job.run();
+    this.job.run();
   }
 
   @Test
   public void testPartitionCommand() throws Exception {
-    String test1 = "a b c";
+    final String test1 = "a b c";
 
-    Assert.assertArrayEquals(new String[] { "a", "b", "c" },
+    Assert.assertArrayEquals(new String[]{"a", "b", "c"},
         ProcessJob.partitionCommandLine(test1));
 
-    String test2 = "a 'b c'";
-    Assert.assertArrayEquals(new String[] { "a", "b c" },
+    final String test2 = "a 'b c'";
+    Assert.assertArrayEquals(new String[]{"a", "b c"},
         ProcessJob.partitionCommandLine(test2));
 
-    String test3 = "a e='b c'";
-    Assert.assertArrayEquals(new String[] { "a", "e=b c" },
+    final String test3 = "a e='b c'";
+    Assert.assertArrayEquals(new String[]{"a", "e=b c"},
         ProcessJob.partitionCommandLine(test3));
   }
 }
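
testPartitionCommand above pins down how ProcessJob.partitionCommandLine splits a command string: whitespace separates arguments, and single quotes group words into one argument with the quotes stripped. A tiny sketch, placed in the same azkaban.jobExecutor package the test uses so the method is accessible:

package azkaban.jobExecutor;

import java.util.Arrays;

public class PartitionCommandDemo {

  public static void main(final String[] args) {
    System.out.println(Arrays.toString(ProcessJob.partitionCommandLine("a b c")));     // [a, b, c]
    System.out.println(Arrays.toString(ProcessJob.partitionCommandLine("a 'b c'")));   // [a, b c]
    System.out.println(Arrays.toString(ProcessJob.partitionCommandLine("a e='b c'"))); // [a, e=b c]
  }
}
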
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/PythonJobTest.java b/azkaban-common/src/test/java/azkaban/jobExecutor/PythonJobTest.java
index b8737c6..cb307e6 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/PythonJobTest.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/PythonJobTest.java
@@ -16,61 +16,57 @@
 
 package azkaban.jobExecutor;
 
+import azkaban.utils.Props;
 import java.io.IOException;
 import java.util.Date;
-
 import org.apache.log4j.Logger;
-
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import azkaban.utils.Props;
-
 public class PythonJobTest {
-  private PythonJob job = null;
-  // private JobDescriptor descriptor = null;
-  private Props props = null;
-  private Logger log = Logger.getLogger(PythonJob.class);
 
   private static final String scriptContent =
       "#!/usr/bin/python  \n" +
           "import re, string, sys  \n" +
           "# if no arguments were given, print a helpful message \n" +
           "l=len(sys.argv) \n" +
-          "if l < 1: \n"+
-              "\tprint 'Usage: celsium --t temp' \n" +
-              "\tsys.exit(1) \n" +
+          "if l < 1: \n" +
+          "\tprint 'Usage: celsium --t temp' \n" +
+          "\tsys.exit(1) \n" +
           "\n" +
           "# Loop over the arguments \n" +
           "i=1 \n" +
           "while i < l-1 : \n" +
-              "\tname = sys.argv[i] \n" +
-              "\tvalue = sys.argv[i+1] \n" +
-              "\tif name == \"--t\": \n" +
-              "\t\ttry: \n" +
-                      "\t\t\tfahrenheit = float(string.atoi(value)) \n" +
-              "\t\texcept string.atoi_error: \n" +
-                     "\t\t\tprint repr(value), \" not a numeric value\" \n" +
-              "\t\telse: \n" +
-                      "\t\t\tcelsius=(fahrenheit-32)*5.0/9.0 \n" +
-                      "\t\t\tprint '%i F = %iC' % (int(fahrenheit), int(celsius+.5)) \n" +
-                      "\t\t\tsys.exit(0) \n" +
-              "\t\ti=i+2\n";
-
+          "\tname = sys.argv[i] \n" +
+          "\tvalue = sys.argv[i+1] \n" +
+          "\tif name == \"--t\": \n" +
+          "\t\ttry: \n" +
+          "\t\t\tfahrenheit = float(string.atoi(value)) \n" +
+          "\t\texcept string.atoi_error: \n" +
+          "\t\t\tprint repr(value), \" not a numeric value\" \n" +
+          "\t\telse: \n" +
+          "\t\t\tcelsius=(fahrenheit-32)*5.0/9.0 \n" +
+          "\t\t\tprint '%i F = %iC' % (int(fahrenheit), int(celsius+.5)) \n" +
+          "\t\t\tsys.exit(0) \n" +
+          "\t\ti=i+2\n";
   private static String scriptFile;
+  private final Logger log = Logger.getLogger(PythonJob.class);
+  private PythonJob job = null;
+  // private JobDescriptor descriptor = null;
+  private Props props = null;
 
   @BeforeClass
   public static void init() {
 
-    long time = (new Date()).getTime();
+    final long time = (new Date()).getTime();
     scriptFile = "/tmp/azkaban_python" + time + ".py";
     // dump script file
     try {
       Utils.dumpFile(scriptFile, scriptContent);
-    } catch (IOException e) {
+    } catch (final IOException e) {
       e.printStackTrace(System.err);
       Assert.fail("error in creating script file:" + e.getLocalizedMessage());
     }
@@ -90,23 +86,23 @@ public class PythonJobTest {
     /* initialize job */
     // descriptor = EasyMock.createMock(JobDescriptor.class);
 
-    props = new Props();
-    props.put(AbstractProcessJob.WORKING_DIR, ".");
-    props.put("type", "python");
-    props.put("script", scriptFile);
-    props.put("t", "90");
-    props.put("type", "script");
-    props.put("fullPath", ".");
+    this.props = new Props();
+    this.props.put(AbstractProcessJob.WORKING_DIR, ".");
+    this.props.put("type", "python");
+    this.props.put("script", scriptFile);
+    this.props.put("t", "90");
+    this.props.put("type", "script");
+    this.props.put("fullPath", ".");
 
     // EasyMock.expect(descriptor.getId()).andReturn("script").times(1);
     // EasyMock.expect(descriptor.getProps()).andReturn(props).times(3);
     // EasyMock.expect(descriptor.getFullPath()).andReturn(".").times(1);
     // EasyMock.replay(descriptor);
-    job = new PythonJob("TestProcess", props, props, log);
+    this.job = new PythonJob("TestProcess", this.props, this.props, this.log);
     // EasyMock.verify(descriptor);
     try {
-      job.run();
-    } catch (Exception e) {
+      this.job.run();
+    } catch (final Exception e) {
       e.printStackTrace(System.err);
       Assert.fail("Python job failed:" + e.getLocalizedMessage());
     }
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/Utils.java b/azkaban-common/src/test/java/azkaban/jobExecutor/Utils.java
index ef23514..44b849d 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/Utils.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/Utils.java
@@ -24,16 +24,17 @@ import java.nio.file.Paths;
 
 
 public class Utils {
+
   private Utils() {
   }
 
-  static void dumpFile(String filename, String fileContent) throws IOException {
+  static void dumpFile(final String filename, final String fileContent) throws IOException {
     try (PrintWriter writer = new PrintWriter(filename, StandardCharsets.UTF_8.toString())) {
       writer.print(fileContent);
     }
   }
 
-  static void removeFile(String filename) throws IOException {
+  static void removeFile(final String filename) throws IOException {
     Files.delete(Paths.get(filename));
   }
 }
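
The helper above writes through a try-with-resources PrintWriter and deletes through java.nio Files. A minimal usage sketch, assuming it is called from another test class in the same azkaban.jobExecutor package (both methods are package-private); the /tmp path is illustrative only:

    package azkaban.jobExecutor;

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.junit.Assert;
    import org.junit.Test;

    public class UtilsUsageSketch {

      @Test
      public void writesAndRemovesTempFile() throws IOException {
        // Hypothetical temp path, used only for this sketch.
        final String path = "/tmp/azkaban_utils_sketch.txt";
        Utils.dumpFile(path, "hello\n");                   // UTF-8 PrintWriter, try-with-resources
        Assert.assertTrue(Files.exists(Paths.get(path)));  // the file was written
        Utils.removeFile(path);                            // Files.delete(...) removes it again
        Assert.assertFalse(Files.exists(Paths.get(path)));
      }
    }
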
diff --git a/azkaban-common/src/test/java/azkaban/jobExecutor/WordCountLocal.java b/azkaban-common/src/test/java/azkaban/jobExecutor/WordCountLocal.java
index 653edc9..5fc99d9 100644
--- a/azkaban-common/src/test/java/azkaban/jobExecutor/WordCountLocal.java
+++ b/azkaban-common/src/test/java/azkaban/jobExecutor/WordCountLocal.java
@@ -30,57 +30,57 @@ import org.apache.log4j.Logger;
 
 public class WordCountLocal extends AbstractJob {
 
+  private final Map<String, Integer> dict = new HashMap<>();
   private String input = null;
   private String output = null;
-  private Map<String, Integer> dict = new HashMap<>();
 
-  public static void main(String[] args) throws Exception {
-    String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
-    System.out.println("propsFile: " + propsFile);
-    Props prop = new Props(null, propsFile);
-    WordCountLocal instance = new WordCountLocal("", prop);
-    instance.run();
+  private WordCountLocal(final String id, final Props prop) {
+    super(id, Logger.getLogger(WordCountLocal.class));
+    this.input = prop.getString("input");
+    this.output = prop.getString("output");
   }
 
-  private WordCountLocal(String id, Props prop) {
-    super(id, Logger.getLogger(WordCountLocal.class));
-    input = prop.getString("input");
-    output = prop.getString("output");
+  public static void main(final String[] args) throws Exception {
+    final String propsFile = System.getenv(ProcessJob.JOB_PROP_ENV);
+    System.out.println("propsFile: " + propsFile);
+    final Props prop = new Props(null, propsFile);
+    final WordCountLocal instance = new WordCountLocal("", prop);
+    instance.run();
   }
 
   @Override
   public void run() throws Exception {
 
-    if (input == null) {
+    if (this.input == null) {
       throw new Exception("input file is null");
     }
-    if (output == null) {
+    if (this.output == null) {
       throw new Exception("output file is null");
     }
-    List<String> lines = Files.readAllLines(Paths.get(input), StandardCharsets.UTF_8);
-    for (String line : lines) {
-      StringTokenizer tokenizer = new StringTokenizer(line);
+    final List<String> lines = Files.readAllLines(Paths.get(this.input), StandardCharsets.UTF_8);
+    for (final String line : lines) {
+      final StringTokenizer tokenizer = new StringTokenizer(line);
       while (tokenizer.hasMoreTokens()) {
-        String word = tokenizer.nextToken();
+        final String word = tokenizer.nextToken();
 
         if (word.equals("end_here")) { // expect an out-of-bound
           // exception
           // todo HappyRay: investigate what the following statements are designed to do.
-          String[] errArray = new String[1];
+          final String[] errArray = new String[1];
           System.out.println("string in possition 2 is " + errArray[1]);
         }
 
-        if (dict.containsKey(word)) {
-          Integer num = dict.get(word);
-          dict.put(word, num + 1);
+        if (this.dict.containsKey(word)) {
+          final Integer num = this.dict.get(word);
+          this.dict.put(word, num + 1);
         } else {
-          dict.put(word, 1);
+          this.dict.put(word, 1);
         }
       }
     }
 
-    try (PrintWriter out = new PrintWriter(output, StandardCharsets.UTF_8.toString())) {
-      for (Map.Entry<String, Integer> entry : dict.entrySet()) {
+    try (PrintWriter out = new PrintWriter(this.output, StandardCharsets.UTF_8.toString())) {
+      for (final Map.Entry<String, Integer> entry : this.dict.entrySet()) {
         out.println(entry.getKey() + "\t" + entry.getValue());
       }
     }
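
The containsKey / get / put branch in run() above can be collapsed into a single Map.merge call. A minimal stand-alone sketch of the equivalent update, not part of this patch:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.StringTokenizer;

    public class WordCountMergeSketch {

      public static void main(final String[] args) {
        final Map<String, Integer> dict = new HashMap<>();
        final StringTokenizer tokenizer = new StringTokenizer("a b a");
        while (tokenizer.hasMoreTokens()) {
          // merge() stores 1 for a new word, otherwise adds 1 to the existing count.
          dict.merge(tokenizer.nextToken(), 1, Integer::sum);
        }
        System.out.println(dict); // e.g. {a=2, b=1}
      }
    }
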
diff --git a/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob.java b/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob.java
index cc41f53..ac0f600 100644
--- a/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob.java
+++ b/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob.java
@@ -16,13 +16,14 @@
 
 package azkaban.jobtype;
 
-import org.apache.log4j.Logger;
-
 import azkaban.jobExecutor.JavaProcessJob;
 import azkaban.utils.Props;
+import org.apache.log4j.Logger;
 
 public class FakeJavaJob extends JavaProcessJob {
-  public FakeJavaJob(String jobid, Props sysProps, Props jobProps, Logger log) {
+
+  public FakeJavaJob(final String jobid, final Props sysProps, final Props jobProps,
+      final Logger log) {
     super(jobid, sysProps, jobProps, log);
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob2.java b/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob2.java
index ccbd2ed..63dfb89 100644
--- a/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob2.java
+++ b/azkaban-common/src/test/java/azkaban/jobtype/FakeJavaJob2.java
@@ -16,13 +16,14 @@
 
 package azkaban.jobtype;
 
-import org.apache.log4j.Logger;
-
 import azkaban.jobExecutor.JavaProcessJob;
 import azkaban.utils.Props;
+import org.apache.log4j.Logger;
 
 public class FakeJavaJob2 extends JavaProcessJob {
-  public FakeJavaJob2(String jobid, Props sysProps, Props jobProps, Logger log) {
+
+  public FakeJavaJob2(final String jobid, final Props sysProps, final Props jobProps,
+      final Logger log) {
     super(jobid, sysProps, jobProps, log);
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/jobtype/JobTypeManagerTest.java b/azkaban-common/src/test/java/azkaban/jobtype/JobTypeManagerTest.java
index 0433214..fb632a8 100644
--- a/azkaban-common/src/test/java/azkaban/jobtype/JobTypeManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/jobtype/JobTypeManagerTest.java
@@ -16,38 +16,35 @@
 
 package azkaban.jobtype;
 
-import com.google.common.io.Resources;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
+import azkaban.jobExecutor.Job;
+import azkaban.utils.Props;
+import com.google.common.io.Resources;
 import java.io.File;
 import java.io.IOException;
 import java.net.URL;
-
 import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
-
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Test;
 import org.junit.Rule;
+import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-import static org.junit.Assert.*;
-
-import azkaban.jobExecutor.Job;
-import azkaban.utils.Props;
-
 /**
  * Test the flow run, especially with embedded flows. Files are in
  * unit/plugins/jobtypes
- *
  */
 public class JobTypeManagerTest {
-  @Rule
-  public TemporaryFolder temp = new TemporaryFolder();
 
   public final static String TEST_PLUGIN_DIR = "jobtypes_test";
-
-  private Logger logger = Logger.getLogger(JobTypeManagerTest.class);
+  private final Logger logger = Logger.getLogger(JobTypeManagerTest.class);
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
   private String testPluginDirPath;
   private JobTypeManager manager;
 
@@ -56,37 +53,35 @@ public class JobTypeManagerTest {
 
   @Before
   public void setUp() throws Exception {
-    File jobTypeDir = temp.newFolder(TEST_PLUGIN_DIR);
-    testPluginDirPath = jobTypeDir.getCanonicalPath();
+    final File jobTypeDir = this.temp.newFolder(TEST_PLUGIN_DIR);
+    this.testPluginDirPath = jobTypeDir.getCanonicalPath();
 
-    URL resourceUrl = Resources.getResource("plugins/jobtypes");
+    final URL resourceUrl = Resources.getResource("plugins/jobtypes");
     assertNotNull(resourceUrl);
     FileUtils.copyDirectory(new File(resourceUrl.toURI()), jobTypeDir);
-    manager = new JobTypeManager(testPluginDirPath, null,
+    this.manager = new JobTypeManager(this.testPluginDirPath, null,
         this.getClass().getClassLoader());
   }
 
   @After
   public void tearDown() throws IOException {
-    temp.delete();
+    this.temp.delete();
   }
 
   /**
    * Tests that the common and common private properties are loaded correctly
-   *
-   * @throws Exception
    */
   @Test
   public void testCommonPluginProps() throws Exception {
-    JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
+    final JobTypePluginSet pluginSet = this.manager.getJobTypePluginSet();
 
-    Props props = pluginSet.getCommonPluginJobProps();
+    final Props props = pluginSet.getCommonPluginJobProps();
     System.out.println(props.toString());
     assertEquals("commonprop1", props.getString("commonprop1"));
     assertEquals("commonprop2", props.getString("commonprop2"));
     assertEquals("commonprop3", props.getString("commonprop3"));
 
-    Props priv = pluginSet.getCommonPluginLoadProps();
+    final Props priv = pluginSet.getCommonPluginLoadProps();
     assertEquals("commonprivate1", priv.getString("commonprivate1"));
     assertEquals("commonprivate2", priv.getString("commonprivate2"));
     assertEquals("commonprivate3", priv.getString("commonprivate3"));
@@ -95,31 +90,29 @@ public class JobTypeManagerTest {
   /**
    * Tests that the proper classes were loaded and that the common and the load
    * properties are properly loaded.
-   *
-   * @throws Exception
    */
   @Test
   public void testLoadedClasses() throws Exception {
-    JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
+    final JobTypePluginSet pluginSet = this.manager.getJobTypePluginSet();
 
-    Props props = pluginSet.getCommonPluginJobProps();
+    final Props props = pluginSet.getCommonPluginJobProps();
     System.out.println(props.toString());
     assertEquals("commonprop1", props.getString("commonprop1"));
     assertEquals("commonprop2", props.getString("commonprop2"));
     assertEquals("commonprop3", props.getString("commonprop3"));
     assertNull(props.get("commonprivate1"));
 
-    Props priv = pluginSet.getCommonPluginLoadProps();
+    final Props priv = pluginSet.getCommonPluginLoadProps();
     assertEquals("commonprivate1", priv.getString("commonprivate1"));
     assertEquals("commonprivate2", priv.getString("commonprivate2"));
     assertEquals("commonprivate3", priv.getString("commonprivate3"));
 
     // Testing the anothertestjobtype
-    Class<? extends Job> aPluginClass =
+    final Class<? extends Job> aPluginClass =
         pluginSet.getPluginClass("anothertestjob");
     assertEquals("azkaban.jobtype.FakeJavaJob", aPluginClass.getName());
-    Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
-    Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
+    final Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
+    final Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
 
     // Loader props
     assertEquals("lib/*", aloadProps.get("jobtype.classpath"));
@@ -134,10 +127,10 @@ public class JobTypeManagerTest {
     assertEquals("commonprop3", ajobProps.get("commonprop3"));
     assertNull(ajobProps.get("commonprivate1"));
 
-    Class<? extends Job> tPluginClass = pluginSet.getPluginClass("testjob");
+    final Class<? extends Job> tPluginClass = pluginSet.getPluginClass("testjob");
     assertEquals("azkaban.jobtype.FakeJavaJob2", tPluginClass.getName());
-    Props tjobProps = pluginSet.getPluginJobProps("testjob");
-    Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
+    final Props tjobProps = pluginSet.getPluginJobProps("testjob");
+    final Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
 
     // Loader props
     assertNull(tloadProps.get("jobtype.classpath"));
@@ -161,21 +154,19 @@ public class JobTypeManagerTest {
 
   /**
    * Test building classes
-   *
-   * @throws Exception
    */
   @Test
   public void testBuildClass() throws Exception {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("type", "anothertestjob");
     jobProps.put("test", "test1");
     jobProps.put("pluginprops3", "4");
-    Job job = manager.buildJobExecutor("anothertestjob", jobProps, logger);
+    final Job job = this.manager.buildJobExecutor("anothertestjob", jobProps, this.logger);
 
     assertTrue(job instanceof FakeJavaJob);
-    FakeJavaJob fjj = (FakeJavaJob) job;
+    final FakeJavaJob fjj = (FakeJavaJob) job;
 
-    Props props = fjj.getJobProps();
+    final Props props = fjj.getJobProps();
     assertEquals("test1", props.get("test"));
     assertNull(props.get("pluginprops1"));
     assertEquals("4", props.get("pluginprops3"));
@@ -187,21 +178,19 @@ public class JobTypeManagerTest {
 
   /**
    * Test building classes 2
-   *
-   * @throws Exception
    */
   @Test
   public void testBuildClass2() throws Exception {
-    Props jobProps = new Props();
+    final Props jobProps = new Props();
     jobProps.put("type", "testjob");
     jobProps.put("test", "test1");
     jobProps.put("pluginprops3", "4");
-    Job job = manager.buildJobExecutor("testjob", jobProps, logger);
+    final Job job = this.manager.buildJobExecutor("testjob", jobProps, this.logger);
 
     assertTrue(job instanceof FakeJavaJob2);
-    FakeJavaJob2 fjj = (FakeJavaJob2) job;
+    final FakeJavaJob2 fjj = (FakeJavaJob2) job;
 
-    Props props = fjj.getJobProps();
+    final Props props = fjj.getJobProps();
     assertEquals("test1", props.get("test"));
     assertEquals("1", props.get("pluginprops1"));
     assertEquals("2", props.get("pluginprops2"));
@@ -214,14 +203,12 @@ public class JobTypeManagerTest {
 
   /**
    * Test out reloading properties
-   *
-   * @throws Exception
    */
   @Test
   public void testResetPlugins() throws Exception {
     // Add a plugins file to the anothertestjob folder
-    File anothertestfolder = new File(testPluginDirPath + "/anothertestjob");
-    Props pluginProps = new Props();
+    final File anothertestfolder = new File(this.testPluginDirPath + "/anothertestjob");
+    final Props pluginProps = new Props();
     pluginProps.put("test1", "1");
     pluginProps.put("test2", "2");
     pluginProps.put("pluginprops3", "4");
@@ -229,69 +216,69 @@ public class JobTypeManagerTest {
         .storeFlattened(new File(anothertestfolder, "plugin.properties"));
 
     // clone the testjob folder
-    File testFolder = new File(testPluginDirPath + "/testjob");
-    FileUtils.copyDirectory(testFolder, new File(testPluginDirPath
+    final File testFolder = new File(this.testPluginDirPath + "/testjob");
+    FileUtils.copyDirectory(testFolder, new File(this.testPluginDirPath
         + "/newtestjob"));
 
     // change the common properties
-    Props commonPlugin =
-        new Props(null, testPluginDirPath + "/common.properties");
+    final Props commonPlugin =
+        new Props(null, this.testPluginDirPath + "/common.properties");
     commonPlugin.put("commonprop1", "1");
     commonPlugin.put("newcommonprop1", "2");
     commonPlugin.removeLocal("commonprop2");
     commonPlugin
-        .storeFlattened(new File(testPluginDirPath + "/common.properties"));
+        .storeFlattened(new File(this.testPluginDirPath + "/common.properties"));
 
     // change the common properties
-    Props commonPrivate =
-        new Props(null, testPluginDirPath + "/commonprivate.properties");
+    final Props commonPrivate =
+        new Props(null, this.testPluginDirPath + "/commonprivate.properties");
     commonPrivate.put("commonprivate1", "1");
     commonPrivate.put("newcommonprivate1", "2");
     commonPrivate.removeLocal("commonprivate2");
-    commonPrivate.storeFlattened(new File(testPluginDirPath
+    commonPrivate.storeFlattened(new File(this.testPluginDirPath
         + "/commonprivate.properties"));
 
     // change testjob private property
-    Props loadProps =
-        new Props(null, testPluginDirPath + "/testjob/private.properties");
+    final Props loadProps =
+        new Props(null, this.testPluginDirPath + "/testjob/private.properties");
     loadProps.put("privatetest", "test");
 
     // Reload the plugins here!!
-    manager.loadPlugins();
+    this.manager.loadPlugins();
 
     // Checkout common props
-    JobTypePluginSet pluginSet = manager.getJobTypePluginSet();
-    Props commonProps = pluginSet.getCommonPluginJobProps();
+    final JobTypePluginSet pluginSet = this.manager.getJobTypePluginSet();
+    final Props commonProps = pluginSet.getCommonPluginJobProps();
     assertEquals("1", commonProps.get("commonprop1"));
     assertEquals("commonprop3", commonProps.get("commonprop3"));
     assertEquals("2", commonProps.get("newcommonprop1"));
     assertNull(commonProps.get("commonprop2"));
 
     // Checkout common private
-    Props commonPrivateProps = pluginSet.getCommonPluginLoadProps();
+    final Props commonPrivateProps = pluginSet.getCommonPluginLoadProps();
     assertEquals("1", commonPrivateProps.get("commonprivate1"));
     assertEquals("commonprivate3", commonPrivateProps.get("commonprivate3"));
     assertEquals("2", commonPrivateProps.get("newcommonprivate1"));
     assertNull(commonPrivateProps.get("commonprivate2"));
 
     // Verify anothertestjob changes
-    Class<? extends Job> atjClass = pluginSet.getPluginClass("anothertestjob");
+    final Class<? extends Job> atjClass = pluginSet.getPluginClass("anothertestjob");
     assertEquals("azkaban.jobtype.FakeJavaJob", atjClass.getName());
-    Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
+    final Props ajobProps = pluginSet.getPluginJobProps("anothertestjob");
     assertEquals("1", ajobProps.get("test1"));
     assertEquals("2", ajobProps.get("test2"));
     assertEquals("4", ajobProps.get("pluginprops3"));
     assertEquals("commonprop3", ajobProps.get("commonprop3"));
 
-    Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
+    final Props aloadProps = pluginSet.getPluginLoaderProps("anothertestjob");
     assertEquals("1", aloadProps.get("commonprivate1"));
     assertNull(aloadProps.get("commonprivate2"));
     assertEquals("commonprivate3", aloadProps.get("commonprivate3"));
 
     // Verify testjob changes
-    Class<? extends Job> tjClass = pluginSet.getPluginClass("testjob");
+    final Class<? extends Job> tjClass = pluginSet.getPluginClass("testjob");
     assertEquals("azkaban.jobtype.FakeJavaJob2", tjClass.getName());
-    Props tjobProps = pluginSet.getPluginJobProps("testjob");
+    final Props tjobProps = pluginSet.getPluginJobProps("testjob");
     assertEquals("1", tjobProps.get("commonprop1"));
     assertEquals("2", tjobProps.get("newcommonprop1"));
     assertEquals("1", tjobProps.get("pluginprops1"));
@@ -300,7 +287,7 @@ public class JobTypeManagerTest {
     assertEquals("pluginprops", tjobProps.get("commonprop3"));
     assertNull(tjobProps.get("commonprop2"));
 
-    Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
+    final Props tloadProps = pluginSet.getPluginLoaderProps("testjob");
     assertNull(tloadProps.get("jobtype.classpath"));
     assertEquals("azkaban.jobtype.FakeJavaJob2",
         tloadProps.get("jobtype.class"));
@@ -309,10 +296,10 @@ public class JobTypeManagerTest {
     assertEquals("private3", tloadProps.get("commonprivate3"));
 
     // Verify newtestjob
-    Class<? extends Job> ntPluginClass = pluginSet.getPluginClass("newtestjob");
+    final Class<? extends Job> ntPluginClass = pluginSet.getPluginClass("newtestjob");
     assertEquals("azkaban.jobtype.FakeJavaJob2", ntPluginClass.getName());
-    Props ntjobProps = pluginSet.getPluginJobProps("newtestjob");
-    Props ntloadProps = pluginSet.getPluginLoaderProps("newtestjob");
+    final Props ntjobProps = pluginSet.getPluginJobProps("newtestjob");
+    final Props ntloadProps = pluginSet.getPluginLoaderProps("newtestjob");
 
     // Loader props
     assertNull(ntloadProps.get("jobtype.classpath"));
diff --git a/azkaban-common/src/test/java/azkaban/metric/FakeMetric.java b/azkaban-common/src/test/java/azkaban/metric/FakeMetric.java
index 4043cb7..b7751ab 100644
--- a/azkaban-common/src/test/java/azkaban/metric/FakeMetric.java
+++ b/azkaban-common/src/test/java/azkaban/metric/FakeMetric.java
@@ -3,9 +3,9 @@ package azkaban.metric;
 /**
  * Dummy Metric to test Azkaban Metrics
  */
-public class FakeMetric extends AbstractMetric<Integer>{
+public class FakeMetric extends AbstractMetric<Integer> {
 
-  public FakeMetric(MetricReportManager manager) {
+  public FakeMetric(final MetricReportManager manager) {
     super("FakeMetric", "int", 4, manager);
   }
 
@@ -13,42 +13,53 @@ public class FakeMetric extends AbstractMetric<Integer>{
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((metricManager == null) ? 0 : metricManager.hashCode());
-    result = prime * result + ((name == null) ? 0 : name.hashCode());
-    result = prime * result + ((type == null) ? 0 : type.hashCode());
-    result = prime * result + ((value == null) ? 0 : value.hashCode());
+    result = prime * result + ((this.metricManager == null) ? 0 : this.metricManager.hashCode());
+    result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
+    result = prime * result + ((this.type == null) ? 0 : this.type.hashCode());
+    result = prime * result + ((this.value == null) ? 0 : this.value.hashCode());
     return result;
   }
 
   @Override
-  public boolean equals(Object obj) {
-    if (this == obj)
+  public boolean equals(final Object obj) {
+    if (this == obj) {
       return true;
-    if (obj == null)
+    }
+    if (obj == null) {
       return false;
-    if (!(obj instanceof FakeMetric))
+    }
+    if (!(obj instanceof FakeMetric)) {
       return false;
-    FakeMetric other = (FakeMetric) obj;
-    if (metricManager == null) {
-      if (other.metricManager != null)
+    }
+    final FakeMetric other = (FakeMetric) obj;
+    if (this.metricManager == null) {
+      if (other.metricManager != null) {
         return false;
-    } else if (!metricManager.equals(other.metricManager))
+      }
+    } else if (!this.metricManager.equals(other.metricManager)) {
       return false;
-    if (name == null) {
-      if (other.name != null)
+    }
+    if (this.name == null) {
+      if (other.name != null) {
         return false;
-    } else if (!name.equals(other.name))
+      }
+    } else if (!this.name.equals(other.name)) {
       return false;
-    if (type == null) {
-      if (other.type != null)
+    }
+    if (this.type == null) {
+      if (other.type != null) {
         return false;
-    } else if (!type.equals(other.type))
+      }
+    } else if (!this.type.equals(other.type)) {
       return false;
-    if (value == null) {
-      if (other.value != null)
+    }
+    if (this.value == null) {
+      if (other.value != null) {
         return false;
-    } else if (!value.equals(other.value))
+      }
+    } else if (!this.value.equals(other.value)) {
       return false;
+    }
     return true;
   }
 }
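
The null-guarded equals/hashCode pair above can also be written with java.util.Objects, which preserves the same field order and null handling. A sketch of the equivalent methods, assuming the same protected fields (metricManager, name, type, value) inherited from AbstractMetric; these fragments would replace the two methods inside FakeMetric:

    import java.util.Objects;

    @Override
    public int hashCode() {
      return Objects.hash(this.metricManager, this.name, this.type, this.value);
    }

    @Override
    public boolean equals(final Object obj) {
      if (this == obj) {
        return true;
      }
      if (!(obj instanceof FakeMetric)) {
        return false;
      }
      final FakeMetric other = (FakeMetric) obj;
      // Objects.equals(...) covers the explicit null checks spelled out above.
      return Objects.equals(this.metricManager, other.metricManager)
          && Objects.equals(this.name, other.name)
          && Objects.equals(this.type, other.type)
          && Objects.equals(this.value, other.value);
    }
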
diff --git a/azkaban-common/src/test/java/azkaban/metric/MetricManagerTest.java b/azkaban-common/src/test/java/azkaban/metric/MetricManagerTest.java
index f67e25e..7aec3c9 100644
--- a/azkaban-common/src/test/java/azkaban/metric/MetricManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/metric/MetricManagerTest.java
@@ -1,31 +1,34 @@
 package azkaban.metric;
 
-import java.util.Date;
-import java.util.List;
-
-import org.junit.Before;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
 
 import azkaban.metric.inmemoryemitter.InMemoryHistoryNode;
 import azkaban.metric.inmemoryemitter.InMemoryMetricEmitter;
 import azkaban.utils.Props;
-import static org.junit.Assert.*;
+import java.util.Date;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
 
 /**
  * Azkaban Metric Manager Tests
  */
 public class MetricManagerTest {
+
   MetricReportManager manager;
   FakeMetric metric;
   InMemoryMetricEmitter emitter;
 
   @Before
   public void setUp() throws Exception {
-    manager = MetricReportManager.getInstance();
-    metric = new FakeMetric(manager);
-    manager.addMetric(metric);
-    emitter = new InMemoryMetricEmitter(new Props());
-    manager.addMetricEmitter(emitter);
+    this.manager = MetricReportManager.getInstance();
+    this.metric = new FakeMetric(this.manager);
+    this.manager.addMetric(this.metric);
+    this.emitter = new InMemoryMetricEmitter(new Props());
+    this.manager.addMetricEmitter(this.emitter);
   }
 
   /**
@@ -33,11 +36,11 @@ public class MetricManagerTest {
    */
   @Test
   public void managerStatusTest() {
-    assertNotNull("Singleton Failed to instantiate", manager);
+    assertNotNull("Singleton Failed to instantiate", this.manager);
     assertTrue("Failed to enable metric manager", MetricReportManager.isAvailable());
-    manager.disableManager();
+    this.manager.disableManager();
     assertFalse("Failed to disable metric manager", MetricReportManager.isAvailable());
-    manager.enableManager();
+    this.manager.enableManager();
     assertTrue("Failed to enable metric manager", MetricReportManager.isAvailable());
   }
 
@@ -46,9 +49,10 @@ public class MetricManagerTest {
    */
   @Test
   public void managerMetricMaintenanceTest() {
-    assertEquals("Failed to add metric", manager.getAllMetrics().size(), 1);
-    assertTrue("Failed to add metric", manager.getAllMetrics().contains(metric));
-    assertEquals("Failed to get metric by Name", manager.getMetricFromName("FakeMetric"), metric);
+    assertEquals("Failed to add metric", this.manager.getAllMetrics().size(), 1);
+    assertTrue("Failed to add metric", this.manager.getAllMetrics().contains(this.metric));
+    assertEquals("Failed to get metric by Name", this.manager.getMetricFromName("FakeMetric"),
+        this.metric);
   }
 
   /**
@@ -56,27 +60,28 @@ public class MetricManagerTest {
    */
   @Test
   public void managerEmitterMaintenanceTest() {
-    assertTrue("Failed to add Emitter", manager.getMetricEmitters().contains(emitter));
+    assertTrue("Failed to add Emitter", this.manager.getMetricEmitters().contains(this.emitter));
 
-    int originalSize = manager.getMetricEmitters().size();
-    manager.removeMetricEmitter(emitter);
-    assertEquals("Failed to remove emitter", manager.getMetricEmitters().size(), originalSize - 1);
-    manager.addMetricEmitter(emitter);
+    final int originalSize = this.manager.getMetricEmitters().size();
+    this.manager.removeMetricEmitter(this.emitter);
+    assertEquals("Failed to remove emitter", this.manager.getMetricEmitters().size(),
+        originalSize - 1);
+    this.manager.addMetricEmitter(this.emitter);
   }
 
   /**
    * Test metric reporting methods, including InMemoryMetricEmitter methods
-   * @throws Exception
    */
   @Test
   public void managerEmitterHandlingTest() throws Exception {
-    emitter.purgeAllData();
-    Date from = new Date();
-    metric.notifyManager();
+    this.emitter.purgeAllData();
+    final Date from = new Date();
+    this.metric.notifyManager();
 
     Thread.sleep(2000);
 
-    List<InMemoryHistoryNode> nodes = emitter.getMetrics("FakeMetric", from, new Date(), false);
+    final List<InMemoryHistoryNode> nodes = this.emitter
+        .getMetrics("FakeMetric", from, new Date(), false);
 
     assertEquals("Failed to report metric", 1, nodes.size());
     assertEquals("Failed to report metric", nodes.get(0).getValue(), 4);
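
managerEmitterHandlingTest above waits with a fixed Thread.sleep(2000) before reading the emitted nodes. A sketch of a bounded polling loop that could replace that wait inside the same test body; the 5-second deadline and 50 ms poll interval are illustrative, not part of the patch:

    // Poll until the emitter has reported something, instead of sleeping a fixed 2 seconds.
    List<InMemoryHistoryNode> nodes = this.emitter.getMetrics("FakeMetric", from, new Date(), false);
    final long deadline = System.currentTimeMillis() + 5000L;
    while (nodes.isEmpty() && System.currentTimeMillis() < deadline) {
      Thread.sleep(50L);
      nodes = this.emitter.getMetrics("FakeMetric", from, new Date(), false);
    }
    assertEquals("Failed to report metric", 1, nodes.size());
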
diff --git a/azkaban-common/src/test/java/azkaban/metrics/CommonMetricsTest.java b/azkaban-common/src/test/java/azkaban/metrics/CommonMetricsTest.java
index 75ffc5a..0a29dee 100644
--- a/azkaban-common/src/test/java/azkaban/metrics/CommonMetricsTest.java
+++ b/azkaban-common/src/test/java/azkaban/metrics/CommonMetricsTest.java
@@ -16,13 +16,14 @@
 
 package azkaban.metrics;
 
+import static org.junit.Assert.assertEquals;
+
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
 
 public class CommonMetricsTest {
+
   private MetricsTestUtility testUtil;
   private CommonMetrics metrics;
 
@@ -34,22 +35,22 @@ public class CommonMetricsTest {
     // the registry.
     // This can also cause problem when we run tests in parallel in the future.
     // todo HappyRay: move MetricsManager, CommonMetrics to use Juice.
-    testUtil = new MetricsTestUtility(MetricsManager.INSTANCE.getRegistry());
-    metrics = CommonMetrics.INSTANCE;
+    this.testUtil = new MetricsTestUtility(MetricsManager.INSTANCE.getRegistry());
+    this.metrics = CommonMetrics.INSTANCE;
   }
 
   @Test
   public void testDBConnectionTimeMetrics() {
-    metrics.setDBConnectionTime(14);
-    assertEquals(14, testUtil.getGaugeValue("dbConnectionTime"));
+    this.metrics.setDBConnectionTime(14);
+    assertEquals(14, this.testUtil.getGaugeValue("dbConnectionTime"));
   }
 
   @Test
   public void testOOMWaitingJobMetrics() {
     final String metricName = "OOM-waiting-job-count";
 
-    assertEquals(0, testUtil.getGaugeValue(metricName));
-    metrics.incrementOOMJobWaitCount();
-    assertEquals(1, testUtil.getGaugeValue(metricName));
+    assertEquals(0, this.testUtil.getGaugeValue(metricName));
+    this.metrics.incrementOOMJobWaitCount();
+    assertEquals(1, this.testUtil.getGaugeValue(metricName));
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/metrics/MetricsTestUtility.java b/azkaban-common/src/test/java/azkaban/metrics/MetricsTestUtility.java
index 545b0af..e270d2e 100644
--- a/azkaban-common/src/test/java/azkaban/metrics/MetricsTestUtility.java
+++ b/azkaban-common/src/test/java/azkaban/metrics/MetricsTestUtility.java
@@ -26,14 +26,14 @@ public class MetricsTestUtility {
 
   // todo HappyRay: move singletons to Juice.
   // This can cause problems when we run tests in parallel in the future.
-  private MetricRegistry registry;
+  private final MetricRegistry registry;
 
-  public MetricsTestUtility(MetricRegistry registry) {
+  public MetricsTestUtility(final MetricRegistry registry) {
     this.registry = registry;
   }
 
-  public long getGaugeValue(String name) {
+  public long getGaugeValue(final String name) {
     // Assume that the gauge value can be converted to type long.
-    return (long) registry.getGauges().get(name).getValue();
+    return (long) this.registry.getGauges().get(name).getValue();
   }
 }
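
A minimal stand-alone sketch of how MetricsTestUtility reads a gauge, assuming a fresh MetricRegistry rather than the shared MetricsManager one; since getGaugeValue casts to long, the registered gauge here deliberately supplies a Long:

    package azkaban.metrics;

    import com.codahale.metrics.Gauge;
    import com.codahale.metrics.MetricRegistry;

    public class MetricsTestUtilitySketch {

      public static void main(final String[] args) {
        final MetricRegistry registry = new MetricRegistry();
        // Register a gauge that returns a Long, matching the (long) cast in getGaugeValue().
        registry.register("dbConnectionTime", (Gauge<Long>) () -> 14L);
        final MetricsTestUtility util = new MetricsTestUtility(registry);
        System.out.println(util.getGaugeValue("dbConnectionTime")); // prints 14
      }
    }
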
diff --git a/azkaban-common/src/test/java/azkaban/project/DirectoryFlowLoaderTest.java b/azkaban-common/src/test/java/azkaban/project/DirectoryFlowLoaderTest.java
index bc2c4cd..f1e148c 100644
--- a/azkaban-common/src/test/java/azkaban/project/DirectoryFlowLoaderTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/DirectoryFlowLoaderTest.java
@@ -16,51 +16,48 @@
 
 package azkaban.project;
 
-import java.io.File;
-import java.net.URL;
-import java.net.URISyntaxException;
-
 import azkaban.test.executions.TestExecutions;
 import azkaban.utils.Props;
-
+import java.net.URISyntaxException;
 import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
 public class DirectoryFlowLoaderTest {
+
   private Project project;
 
   @Before
   public void setUp() {
-    project = new Project(11, "myTestProject");
+    this.project = new Project(11, "myTestProject");
   }
 
   @Test
   public void testDirectoryLoad() throws URISyntaxException {
-    Logger logger = Logger.getLogger(this.getClass());
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final Logger logger = Logger.getLogger(this.getClass());
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
 
-    loader.loadProjectFlow(project, TestExecutions.getFlowDir("exectest1"));
+    loader.loadProjectFlow(this.project, TestExecutions.getFlowDir("exectest1"));
     logger.info(loader.getFlowMap().size());
   }
 
   @Test
   public void testLoadEmbeddedFlow() throws URISyntaxException {
-    Logger logger = Logger.getLogger(this.getClass());
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final Logger logger = Logger.getLogger(this.getClass());
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
 
-    loader.loadProjectFlow(project, TestExecutions.getFlowDir("embedded"));
+    loader.loadProjectFlow(this.project, TestExecutions.getFlowDir("embedded"));
     Assert.assertEquals(0, loader.getErrors().size());
   }
 
   @Test
   public void testRecursiveLoadEmbeddedFlow() throws URISyntaxException {
-    Logger logger = Logger.getLogger(this.getClass());
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final Logger logger = Logger.getLogger(this.getClass());
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
 
-    loader.loadProjectFlow(project, TestExecutions.getFlowDir("embedded_bad"));
-    for (String error : loader.getErrors()) {
+    loader.loadProjectFlow(this.project, TestExecutions.getFlowDir("embedded_bad"));
+    for (final String error : loader.getErrors()) {
       System.out.println(error);
     }
 
diff --git a/azkaban-common/src/test/java/azkaban/project/JdbcProjectLoaderTest.java b/azkaban-common/src/test/java/azkaban/project/JdbcProjectLoaderTest.java
index f78456d..71d952c 100644
--- a/azkaban-common/src/test/java/azkaban/project/JdbcProjectLoaderTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/JdbcProjectLoaderTest.java
@@ -16,15 +16,23 @@
 
 package azkaban.project;
 
+import azkaban.database.DataSourceUtils;
+import azkaban.flow.Edge;
+import azkaban.flow.Flow;
+import azkaban.flow.Node;
+import azkaban.project.ProjectLogEvent.EventType;
+import azkaban.user.Permission;
+import azkaban.user.User;
+import azkaban.utils.Pair;
+import azkaban.utils.Props;
+import azkaban.utils.PropsUtils;
 import java.io.File;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.HashMap;
 import java.util.List;
-
 import javax.sql.DataSource;
-
 import org.apache.commons.dbutils.DbUtils;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
@@ -33,29 +41,19 @@ import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import azkaban.database.DataSourceUtils;
-import azkaban.flow.Edge;
-import azkaban.flow.Flow;
-import azkaban.flow.Node;
-import azkaban.project.ProjectLogEvent.EventType;
-import azkaban.user.Permission;
-import azkaban.user.User;
-import azkaban.utils.Pair;
-import azkaban.utils.Props;
-import azkaban.utils.PropsUtils;
-
 public class JdbcProjectLoaderTest {
-  private static boolean testDBExists;
+
   private static final String host = "localhost";
   private static final int port = 3306;
   private static final String database = "test";
   private static final String user = "azkaban";
   private static final String password = "azkaban";
   private static final int numConnections = 10;
+  private static boolean testDBExists;
 
   @BeforeClass
   public static void setupDB() {
-    DataSource dataSource =
+    final DataSource dataSource =
         DataSourceUtils.getMySQLDataSource(host, port, database, user,
             password, numConnections);
     testDBExists = true;
@@ -63,18 +61,18 @@ public class JdbcProjectLoaderTest {
     Connection connection = null;
     try {
       connection = dataSource.getConnection();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
       return;
     }
 
-    CountHandler countHandler = new CountHandler();
-    QueryRunner runner = new QueryRunner();
+    final CountHandler countHandler = new CountHandler();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.query(connection, "SELECT COUNT(1) FROM projects", countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -84,7 +82,7 @@ public class JdbcProjectLoaderTest {
     try {
       runner.query(connection, "SELECT COUNT(1) FROM project_events",
           countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -94,7 +92,7 @@ public class JdbcProjectLoaderTest {
     try {
       runner.query(connection, "SELECT COUNT(1) FROM project_permissions",
           countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -104,7 +102,7 @@ public class JdbcProjectLoaderTest {
     try {
       runner.query(connection, "SELECT COUNT(1) FROM project_files",
           countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -114,7 +112,7 @@ public class JdbcProjectLoaderTest {
     try {
       runner.query(connection, "SELECT COUNT(1) FROM project_flows",
           countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -124,7 +122,7 @@ public class JdbcProjectLoaderTest {
     try {
       runner.query(connection, "SELECT COUNT(1) FROM project_properties",
           countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -141,24 +139,24 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    DataSource dataSource =
+    final DataSource dataSource =
         DataSourceUtils.getMySQLDataSource(host, port, database, user,
             password, numConnections);
     Connection connection = null;
     try {
       connection = dataSource.getConnection();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
       return;
     }
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, "DELETE FROM projects");
 
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -167,7 +165,7 @@ public class JdbcProjectLoaderTest {
 
     try {
       runner.update(connection, "DELETE FROM project_events");
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -176,7 +174,7 @@ public class JdbcProjectLoaderTest {
 
     try {
       runner.update(connection, "DELETE FROM project_permissions");
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -185,7 +183,7 @@ public class JdbcProjectLoaderTest {
 
     try {
       runner.update(connection, "DELETE FROM project_files");
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -194,7 +192,7 @@ public class JdbcProjectLoaderTest {
 
     try {
       runner.update(connection, "DELETE FROM project_flows");
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -203,7 +201,7 @@ public class JdbcProjectLoaderTest {
 
     try {
       runner.update(connection, "DELETE FROM project_properties");
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -213,95 +211,103 @@ public class JdbcProjectLoaderTest {
     DbUtils.closeQuietly(connection);
   }
 
-    /** Test case to validated permissions for fetchProjectByName **/
-    @Test
-    public void testPermissionRetrivalByFetchProjectByName()
-        throws ProjectManagerException {
-        if (!isTestSetup()) {
-            return;
-        }
+  /**
+   * Test case to validate permissions for fetchProjectByName
+   **/
+  @Test
+  public void testPermissionRetrivalByFetchProjectByName()
+      throws ProjectManagerException {
+    if (!isTestSetup()) {
+      return;
+    }
 
-        ProjectLoader loader = createLoader();
-        String projectName = "mytestProject";
-        String projectDescription = "This is my new project";
-        User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "mytestProject";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-        Project project =
-            loader.createNewProject(projectName, projectDescription, user);
+    final Project project =
+        loader.createNewProject(projectName, projectDescription, user);
 
-        Permission perm = new Permission(0x2);
-        loader.updatePermission(project, user.getUserId(), perm, false);
-        loader.updatePermission(project, "group", perm, true);
+    final Permission perm = new Permission(0x2);
+    loader.updatePermission(project, user.getUserId(), perm, false);
+    loader.updatePermission(project, "group", perm, true);
 
-        Permission permOverride = new Permission(0x6);
-        loader.updatePermission(project, user.getUserId(), permOverride, false);
+    final Permission permOverride = new Permission(0x6);
+    loader.updatePermission(project, user.getUserId(), permOverride, false);
 
-        Project fetchedProject = loader.fetchProjectByName(project.getName());
-        assertProjectMemberEquals(project, fetchedProject);
-        Assert.assertEquals(permOverride,
-            fetchedProject.getUserPermission(user.getUserId()));
+    final Project fetchedProject = loader.fetchProjectByName(project.getName());
+    assertProjectMemberEquals(project, fetchedProject);
+    Assert.assertEquals(permOverride,
+        fetchedProject.getUserPermission(user.getUserId()));
+  }
+
+  /**
+   * Default Test case for fetchProjectByName
+   **/
+  @Test
+  public void testProjectRetrievalByFetchProjectByName()
+      throws ProjectManagerException {
+    if (!isTestSetup()) {
+      return;
     }
 
-    /** Default Test case for fetchProjectByName **/
-    @Test
-    public void testProjectRetrievalByFetchProjectByName()
-        throws ProjectManagerException {
-        if (!isTestSetup()) {
-            return;
-        }
+    final ProjectLoader loader = createLoader();
+    final String projectName = "mytestProject";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-        ProjectLoader loader = createLoader();
-        String projectName = "mytestProject";
-        String projectDescription = "This is my new project";
-        User user = new User("testUser");
+    final Project project =
+        loader.createNewProject(projectName, projectDescription, user);
 
-        Project project =
-            loader.createNewProject(projectName, projectDescription, user);
+    final Project fetchedProject = loader.fetchProjectByName(project.getName());
+    assertProjectMemberEquals(project, fetchedProject);
+  }
 
-        Project fetchedProject = loader.fetchProjectByName(project.getName());
-        assertProjectMemberEquals(project, fetchedProject);
+  /**
+   * Default Test case for fetchProjectByName
+   **/
+  @Test
+  public void testDuplicateRetrivalByFetchProjectByName()
+      throws ProjectManagerException {
+    if (!isTestSetup()) {
+      return;
     }
 
-    /** Default Test case for fetchProjectByName **/
-    @Test
-    public void testDuplicateRetrivalByFetchProjectByName()
-        throws ProjectManagerException {
-        if (!isTestSetup()) {
-            return;
-        }
+    final ProjectLoader loader = createLoader();
+    final String projectName = "mytestProject";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-        ProjectLoader loader = createLoader();
-        String projectName = "mytestProject";
-        String projectDescription = "This is my new project";
-        User user = new User("testUser");
+    final Project project =
+        loader.createNewProject(projectName, projectDescription, user);
 
-        Project project =
-            loader.createNewProject(projectName, projectDescription, user);
+    loader.removeProject(project, user.getUserId());
 
-        loader.removeProject(project, user.getUserId());
+    final Project newProject =
+        loader.createNewProject(projectName, projectDescription, user);
 
-        Project newProject =
-            loader.createNewProject(projectName, projectDescription, user);
+    final Project fetchedProject = loader.fetchProjectByName(project.getName());
+    Assert.assertEquals(newProject.getId(), fetchedProject.getId());
 
-        Project fetchedProject = loader.fetchProjectByName(project.getName());
-        Assert.assertEquals(newProject.getId(), fetchedProject.getId());
+  }
 
+  /**
+   * Test case for NonExistantProject project fetch
+   **/
+  @Test
+  public void testInvalidProjectByFetchProjectByName() {
+    if (!isTestSetup()) {
+      return;
     }
-
-    /** Test case for NonExistantProject project fetch **/
-    @Test
-    public void testInvalidProjectByFetchProjectByName() {
-        if (!isTestSetup()) {
-            return;
-        }
-        ProjectLoader loader = createLoader();
-        try {
-            loader.fetchProjectByName("NonExistantProject");
-        } catch (ProjectManagerException ex) {
-            System.out.println("Test true");
-        }
-        Assert.fail("Expecting exception, but didn't get one");
+    final ProjectLoader loader = createLoader();
+    try {
+      loader.fetchProjectByName("NonExistantProject");
+    } catch (final ProjectManagerException ex) {
+      System.out.println("Test true");
     }
+    Assert.fail("Expecting exception, but didn't get one");
+  }
 
   @Test
   public void testCreateProject() throws ProjectManagerException {
@@ -309,12 +315,12 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    ProjectLoader loader = createLoader();
-    String projectName = "mytestProject";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "mytestProject";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     Assert.assertTrue("Project Id set", project.getId() > -1);
     Assert.assertEquals("Project name", projectName, project.getName());
@@ -322,7 +328,7 @@ public class JdbcProjectLoaderTest {
         project.getDescription());
 
     System.out.println("Test true");
-    Project project2 = loader.fetchProjectById(project.getId());
+    final Project project2 = loader.fetchProjectById(project.getId());
     assertProjectMemberEquals(project, project2);
   }
 
@@ -332,27 +338,27 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    ProjectLoader loader = createLoader();
-    String projectName = "testRemoveProject";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "testRemoveProject";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     Assert.assertTrue("Project Id set", project.getId() > -1);
     Assert.assertEquals("Project name", projectName, project.getName());
     Assert.assertEquals("Project description", projectDescription,
         project.getDescription());
 
-    Project project2 = loader.fetchProjectById(project.getId());
+    final Project project2 = loader.fetchProjectById(project.getId());
     assertProjectMemberEquals(project, project2);
     loader.removeProject(project, user.getUserId());
 
-    Project project3 = loader.fetchProjectById(project.getId());
+    final Project project3 = loader.fetchProjectById(project.getId());
     Assert.assertFalse(project3.isActive());
 
-    List<Project> projList = loader.fetchAllActiveProjects();
-    for (Project proj : projList) {
+    final List<Project> projList = loader.fetchAllActiveProjects();
+    for (final Project proj : projList) {
       Assert.assertTrue(proj.getId() != project.getId());
     }
   }
@@ -363,30 +369,30 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    ProjectLoader loader = createLoader();
-    String projectName = "mytestProject1";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "mytestProject1";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     Assert.assertTrue("Project Id set", project.getId() > -1);
     Assert.assertEquals("Project name", projectName, project.getName());
     Assert.assertEquals("Project description", projectDescription,
         project.getDescription());
 
-    Permission perm = new Permission(0x2);
+    final Permission perm = new Permission(0x2);
     loader.updatePermission(project, user.getUserId(), new Permission(0x2),
         false);
     loader.updatePermission(project, "group1", new Permission(0x2), true);
     Assert.assertEquals(perm, project.getUserPermission(user.getUserId()));
 
-    Permission permOverride = new Permission(0x6);
+    final Permission permOverride = new Permission(0x6);
     loader.updatePermission(project, user.getUserId(), permOverride, false);
     Assert.assertEquals(permOverride,
         project.getUserPermission(user.getUserId()));
 
-    Project project2 = loader.fetchProjectById(project.getId());
+    final Project project2 = loader.fetchProjectById(project.getId());
     assertProjectMemberEquals(project, project2);
     Assert.assertEquals(permOverride,
         project2.getUserPermission(user.getUserId()));
@@ -398,40 +404,41 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    ProjectLoader loader = createLoader();
-    String projectName = "testProjectEventLogs";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "testProjectEventLogs";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    String message = "My message";
-    EventType type = EventType.USER_PERMISSION;
-    Project project =
+    final String message = "My message";
+    final EventType type = EventType.USER_PERMISSION;
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     loader.postEvent(project, type, user.getUserId(), message);
 
-    List<ProjectLogEvent> events = loader.getProjectEvents(project, 10, 0);
+    final List<ProjectLogEvent> events = loader.getProjectEvents(project, 10, 0);
     Assert.assertTrue(events.size() == 1);
 
-    ProjectLogEvent event = events.get(0);
+    final ProjectLogEvent event = events.get(0);
     Assert.assertEquals(event.getProjectId(), project.getId());
     Assert.assertEquals(event.getUser(), user.getUserId());
     Assert.assertEquals(event.getMessage(), message);
     Assert.assertEquals(event.getType(), type);
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testFlowUpload() throws ProjectManagerException {
-    ProjectLoader loader = createLoader();
+    final ProjectLoader loader = createLoader();
     ((JdbcProjectLoader) loader)
         .setDefaultEncodingType(JdbcProjectLoader.EncodingType.GZIP);
-    String projectName = "mytestFlowUpload1";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final String projectName = "mytestFlowUpload1";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
 
-    Flow flow = new Flow("MyNewFlow");
+    final Flow flow = new Flow("MyNewFlow");
 
     flow.addNode(new Node("A"));
     flow.addNode(new Node("B"));
@@ -447,26 +454,27 @@ public class JdbcProjectLoaderTest {
 
     loader.uploadFlow(project, 4, flow);
     project.setVersion(4);
-    Flow newFlow = loader.fetchFlow(project, flow.getId());
+    final Flow newFlow = loader.fetchFlow(project, flow.getId());
     Assert.assertTrue(newFlow != null);
     Assert.assertEquals(flow.getId(), newFlow.getId());
     Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
     Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testFlowUploadPlain() throws ProjectManagerException {
-    ProjectLoader loader = createLoader();
+    final ProjectLoader loader = createLoader();
     ((JdbcProjectLoader) loader)
         .setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
-    String projectName = "mytestFlowUpload2";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final String projectName = "mytestFlowUpload2";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
 
-    Flow flow = new Flow("MyNewFlow2");
+    final Flow flow = new Flow("MyNewFlow2");
 
     flow.addNode(new Node("A1"));
     flow.addNode(new Node("B1"));
@@ -482,36 +490,37 @@ public class JdbcProjectLoaderTest {
 
     loader.uploadFlow(project, 4, flow);
     project.setVersion(4);
-    Flow newFlow = loader.fetchFlow(project, flow.getId());
+    final Flow newFlow = loader.fetchFlow(project, flow.getId());
     Assert.assertTrue(newFlow != null);
     Assert.assertEquals(flow.getId(), newFlow.getId());
     Assert.assertEquals(flow.getEdges().size(), newFlow.getEdges().size());
     Assert.assertEquals(flow.getNodes().size(), newFlow.getNodes().size());
 
-    List<Flow> flows = loader.fetchAllProjectFlows(project);
+    final List<Flow> flows = loader.fetchAllProjectFlows(project);
     Assert.assertTrue(flows.size() == 1);
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testProjectProperties() throws ProjectManagerException {
-    ProjectLoader loader = createLoader();
+    final ProjectLoader loader = createLoader();
     ((JdbcProjectLoader) loader)
         .setDefaultEncodingType(JdbcProjectLoader.EncodingType.PLAIN);
-    String projectName = "testProjectProperties";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final String projectName = "testProjectProperties";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     project.setVersion(5);
-    Props props = new Props();
+    final Props props = new Props();
     props.put("a", "abc");
     props.put("b", "bcd");
     props.put("c", "cde");
     props.setSource("mysource");
     loader.uploadProjectProperty(project, props);
 
-    Props retProps = loader.fetchProjectProperty(project, "mysource");
+    final Props retProps = loader.fetchProjectProperty(project, "mysource");
 
     Assert.assertEquals(retProps.getSource(), props.getSource());
     Assert.assertEquals(retProps.getKeySet(), props.getKeySet());
@@ -525,28 +534,28 @@ public class JdbcProjectLoaderTest {
       return;
     }
 
-    ProjectLoader loader = createLoader();
-    String projectName = "testProjectFilesUpload1";
-    String projectDescription = "This is my new project";
-    User user = new User("testUser");
+    final ProjectLoader loader = createLoader();
+    final String projectName = "testProjectFilesUpload1";
+    final String projectDescription = "This is my new project";
+    final User user = new User("testUser");
 
-    Project project =
+    final Project project =
         loader.createNewProject(projectName, projectDescription, user);
     Assert.assertTrue("Project Id set", project.getId() > -1);
     Assert.assertEquals("Project name", projectName, project.getName());
     Assert.assertEquals("Project description", projectDescription,
         project.getDescription());
 
-    File testFile = new File("unit/project/testjob/testjob.zip");
+    final File testFile = new File("unit/project/testjob/testjob.zip");
 
     loader.uploadProjectFile(project.getId(), 1, testFile, user.getUserId());
 
-    ProjectFileHandler handler = loader.getUploadedFile(project.getId(), 1);
+    final ProjectFileHandler handler = loader.getUploadedFile(project.getId(), 1);
     Assert.assertEquals(handler.getProjectId(), project.getId());
     Assert.assertEquals(handler.getFileName(), "testjob.zip");
     Assert.assertEquals(handler.getVersion(), 1);
     Assert.assertEquals(handler.getFileType(), "zip");
-    File file = handler.getLocalFile();
+    final File file = handler.getLocalFile();
     Assert.assertTrue(handler.getLocalFile().exists());
     Assert.assertEquals(handler.getFileName(), "testjob.zip");
     Assert.assertEquals(handler.getUploader(), user.getUserId());
@@ -557,7 +566,7 @@ public class JdbcProjectLoaderTest {
   }
 
   // Custom equals for what I think is important
-  private void assertProjectMemberEquals(Project p1, Project p2) {
+  private void assertProjectMemberEquals(final Project p1, final Project p2) {
     Assert.assertEquals(p1.getId(), p2.getId());
     Assert.assertEquals(p1.getName(), p2.getName());
     Assert.assertEquals(p1.getCreateTimestamp(), p2.getCreateTimestamp());
@@ -571,70 +580,70 @@ public class JdbcProjectLoaderTest {
     assertGroupPermissionsEqual(p1, p2);
   }
 
-  private void assertUserPermissionsEqual(Project p1, Project p2) {
-    List<Pair<String, Permission>> perm1 = p1.getUserPermissions();
-    List<Pair<String, Permission>> perm2 = p2.getUserPermissions();
+  private void assertUserPermissionsEqual(final Project p1, final Project p2) {
+    final List<Pair<String, Permission>> perm1 = p1.getUserPermissions();
+    final List<Pair<String, Permission>> perm2 = p2.getUserPermissions();
 
     Assert.assertEquals(perm1.size(), perm2.size());
 
     {
-      HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
-      for (Pair<String, Permission> p : perm1) {
+      final HashMap<String, Permission> perm1Map = new HashMap<>();
+      for (final Pair<String, Permission> p : perm1) {
         perm1Map.put(p.getFirst(), p.getSecond());
       }
-      for (Pair<String, Permission> p : perm2) {
+      for (final Pair<String, Permission> p : perm2) {
         Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
-        Permission perm = perm1Map.get(p.getFirst());
+        final Permission perm = perm1Map.get(p.getFirst());
         Assert.assertEquals(perm, p.getSecond());
       }
     }
 
     {
-      HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
-      for (Pair<String, Permission> p : perm2) {
+      final HashMap<String, Permission> perm2Map = new HashMap<>();
+      for (final Pair<String, Permission> p : perm2) {
         perm2Map.put(p.getFirst(), p.getSecond());
       }
-      for (Pair<String, Permission> p : perm1) {
+      for (final Pair<String, Permission> p : perm1) {
         Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
-        Permission perm = perm2Map.get(p.getFirst());
+        final Permission perm = perm2Map.get(p.getFirst());
         Assert.assertEquals(perm, p.getSecond());
       }
     }
   }
 
-  private void assertGroupPermissionsEqual(Project p1, Project p2) {
-    List<Pair<String, Permission>> perm1 = p1.getGroupPermissions();
-    List<Pair<String, Permission>> perm2 = p2.getGroupPermissions();
+  private void assertGroupPermissionsEqual(final Project p1, final Project p2) {
+    final List<Pair<String, Permission>> perm1 = p1.getGroupPermissions();
+    final List<Pair<String, Permission>> perm2 = p2.getGroupPermissions();
 
     Assert.assertEquals(perm1.size(), perm2.size());
 
     {
-      HashMap<String, Permission> perm1Map = new HashMap<String, Permission>();
-      for (Pair<String, Permission> p : perm1) {
+      final HashMap<String, Permission> perm1Map = new HashMap<>();
+      for (final Pair<String, Permission> p : perm1) {
         perm1Map.put(p.getFirst(), p.getSecond());
       }
-      for (Pair<String, Permission> p : perm2) {
+      for (final Pair<String, Permission> p : perm2) {
         Assert.assertTrue(perm1Map.containsKey(p.getFirst()));
-        Permission perm = perm1Map.get(p.getFirst());
+        final Permission perm = perm1Map.get(p.getFirst());
         Assert.assertEquals(perm, p.getSecond());
       }
     }
 
     {
-      HashMap<String, Permission> perm2Map = new HashMap<String, Permission>();
-      for (Pair<String, Permission> p : perm2) {
+      final HashMap<String, Permission> perm2Map = new HashMap<>();
+      for (final Pair<String, Permission> p : perm2) {
         perm2Map.put(p.getFirst(), p.getSecond());
       }
-      for (Pair<String, Permission> p : perm1) {
+      for (final Pair<String, Permission> p : perm1) {
         Assert.assertTrue(perm2Map.containsKey(p.getFirst()));
-        Permission perm = perm2Map.get(p.getFirst());
+        final Permission perm = perm2Map.get(p.getFirst());
         Assert.assertEquals(perm, p.getSecond());
       }
     }
   }
 
   private ProjectLoader createLoader() {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("database.type", "mysql");
 
     props.put("mysql.host", host);
@@ -658,8 +667,9 @@ public class JdbcProjectLoaderTest {
   }
 
   public static class CountHandler implements ResultSetHandler<Integer> {
+
     @Override
-    public Integer handle(ResultSet rs) throws SQLException {
+    public Integer handle(final ResultSet rs) throws SQLException {
       int val = 0;
       while (rs.next()) {
         val++;
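
For orientation, the CountHandler above is a commons-dbutils ResultSetHandler that simply tallies rows. A minimal usage sketch, assuming a DataSource and a hypothetical projects table (neither comes from this patch):

// Hypothetical usage of CountHandler via commons-dbutils QueryRunner; the DataSource and SQL are assumptions.
static int countRows(javax.sql.DataSource dataSource) throws java.sql.SQLException {
  org.apache.commons.dbutils.QueryRunner runner = new org.apache.commons.dbutils.QueryRunner(dataSource);
  // QueryRunner runs the query, hands the ResultSet to the handler, and returns its row count.
  return runner.query("SELECT * FROM projects", new JdbcProjectLoaderTest.CountHandler());
}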
diff --git a/azkaban-common/src/test/java/azkaban/project/MockProjectLoader.java b/azkaban-common/src/test/java/azkaban/project/MockProjectLoader.java
index 71f9e9b..c16e77d 100644
--- a/azkaban-common/src/test/java/azkaban/project/MockProjectLoader.java
+++ b/azkaban-common/src/test/java/azkaban/project/MockProjectLoader.java
@@ -16,6 +16,12 @@
 
 package azkaban.project;
 
+import azkaban.flow.Flow;
+import azkaban.project.ProjectLogEvent.EventType;
+import azkaban.user.Permission;
+import azkaban.user.User;
+import azkaban.utils.Props;
+import azkaban.utils.Triple;
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -23,34 +29,26 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
-import azkaban.project.ProjectLogEvent.EventType;
-import azkaban.flow.Flow;
-import azkaban.user.Permission;
-import azkaban.user.User;
-import azkaban.utils.Props;
-import azkaban.utils.Triple;
 import java.util.concurrent.ConcurrentHashMap;
 
 public class MockProjectLoader implements ProjectLoader {
-  public File dir;
-
-  public MockProjectLoader(File dir) {
-    this.dir = dir;
-  }
 
-  private ConcurrentHashMap<Integer, Project> projectsById =
+  private static int projectId = 0;
+  private final ConcurrentHashMap<Integer, Project> projectsById =
       new ConcurrentHashMap<>();
-  private ConcurrentHashMap<String, Project> projectsByName =
+  private final ConcurrentHashMap<String, Project> projectsByName =
       new ConcurrentHashMap<>();
+  public File dir;
 
-  private static int projectId = 0;
+  public MockProjectLoader(final File dir) {
+    this.dir = dir;
+  }
 
   @Override
   public List<Project> fetchAllActiveProjects() throws ProjectManagerException {
-    ArrayList<Project> activeProjects = new ArrayList<>();
-    for(Project project : projectsById.values()){
-      if(project.isActive()){
+    final ArrayList<Project> activeProjects = new ArrayList<>();
+    for (final Project project : this.projectsById.values()) {
+      if (project.isActive()) {
         activeProjects.add(project);
       }
     }
@@ -58,22 +56,22 @@ public class MockProjectLoader implements ProjectLoader {
   }
 
   @Override
-  public Project fetchProjectById(int id) throws ProjectManagerException {
+  public Project fetchProjectById(final int id) throws ProjectManagerException {
     System.out.println("MockProjectLoader: fetch project by id " + id);
-    if(!projectsById.containsKey(id)){
+    if (!this.projectsById.containsKey(id)) {
       throw new ProjectManagerException("Could not get project by id.");
     }
-    return projectsById.get(id);
+    return this.projectsById.get(id);
   }
 
   @Override
-  public Project createNewProject(String name, String description, User creator)
+  public Project createNewProject(final String name, final String description, final User creator)
       throws ProjectManagerException {
-    Project project = new Project(++projectId, name);
+    final Project project = new Project(++projectId, name);
     project.setDescription(description);
     project.setActive(true);
-    projectsById.put(project.getId(), project);
-    projectsByName.put(project.getName(), project);
+    this.projectsById.put(project.getId(), project);
+    this.projectsByName.put(project.getName(), project);
     System.out.println("MockProjectLoader: Created project " + project.getName() +
         ", id: " + project.getId() + ", description: " + description +
         ", user: " + creator.getUserId());
@@ -81,138 +79,141 @@ public class MockProjectLoader implements ProjectLoader {
   }
 
   @Override
-  public void removeProject(Project project, String user)
+  public void removeProject(final Project project, final String user)
       throws ProjectManagerException {
     project.setActive(false);
     System.out.println("MockProjectLoader: removed project " + project.getName());
   }
 
   @Override
-  public void updatePermission(Project project, String name, Permission perm,
-      boolean isGroup) throws ProjectManagerException {
+  public void updatePermission(final Project project, final String name, final Permission perm,
+      final boolean isGroup) throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void updateDescription(Project project, String description, String user)
+  public void updateDescription(final Project project, final String description, final String user)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public boolean postEvent(Project project, EventType type, String user,
-      String message) {
+  public boolean postEvent(final Project project, final EventType type, final String user,
+      final String message) {
     // TODO Auto-generated method stub
     return false;
   }
 
   @Override
-  public List<ProjectLogEvent> getProjectEvents(Project project, int num,
-      int skip) throws ProjectManagerException {
+  public List<ProjectLogEvent> getProjectEvents(final Project project, final int num,
+      final int skip) throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
-  public void uploadProjectFile(int projectId, int version, File localFile, String user)
+  public void uploadProjectFile(final int projectId, final int version, final File localFile,
+      final String user)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void addProjectVersion(int projectId, int version, File localFile, String uploader, byte[] md5, String resourceId)
+  public void addProjectVersion(final int projectId, final int version, final File localFile,
+      final String uploader,
+      final byte[] md5, final String resourceId)
       throws ProjectManagerException {
 
   }
 
   @Override
-  public ProjectFileHandler fetchProjectMetaData(int projectId, int version) {
+  public ProjectFileHandler fetchProjectMetaData(final int projectId, final int version) {
     return null;
   }
 
   @Override
-  public ProjectFileHandler getUploadedFile(int projectId, int version)
+  public ProjectFileHandler getUploadedFile(final int projectId, final int version)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
-  public void changeProjectVersion(Project project, int version, String user)
+  public void changeProjectVersion(final Project project, final int version, final String user)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void uploadFlows(Project project, int version, Collection<Flow> flows)
+  public void uploadFlows(final Project project, final int version, final Collection<Flow> flows)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void uploadFlow(Project project, int version, Flow flow)
+  public void uploadFlow(final Project project, final int version, final Flow flow)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public Flow fetchFlow(Project project, String flowId)
+  public Flow fetchFlow(final Project project, final String flowId)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
-  public List<Flow> fetchAllProjectFlows(Project project)
+  public List<Flow> fetchAllProjectFlows(final Project project)
       throws ProjectManagerException {
     return new ArrayList<>();
   }
 
   @Override
-  public int getLatestProjectVersion(Project project)
+  public int getLatestProjectVersion(final Project project)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
     return 0;
   }
 
   @Override
-  public void uploadProjectProperty(Project project, Props props)
+  public void uploadProjectProperty(final Project project, final Props props)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void uploadProjectProperties(Project project, List<Props> properties)
+  public void uploadProjectProperties(final Project project, final List<Props> properties)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public Props fetchProjectProperty(Project project, String propsName)
+  public Props fetchProjectProperty(final Project project, final String propsName)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
-  public Map<String, Props> fetchProjectProperties(int projectId, int version)
+  public Map<String, Props> fetchProjectProperties(final int projectId, final int version)
       throws ProjectManagerException {
-    Map<String, Props> propertyMap = new HashMap<String, Props>();
-    for (File file : dir.listFiles()) {
-      String name = file.getName();
+    final Map<String, Props> propertyMap = new HashMap<>();
+    for (final File file : this.dir.listFiles()) {
+      final String name = file.getName();
       if (name.endsWith(".job") || name.endsWith(".properties")) {
         try {
-          Props props = new Props(null, file);
+          final Props props = new Props(null, file);
           propertyMap.put(name, props);
-        } catch (IOException e) {
+        } catch (final IOException e) {
           throw new ProjectManagerException(e.getMessage());
         }
       }
@@ -222,60 +223,60 @@ public class MockProjectLoader implements ProjectLoader {
   }
 
   @Override
-  public void cleanOlderProjectVersion(int projectId, int version)
+  public void cleanOlderProjectVersion(final int projectId, final int version)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void removePermission(Project project, String name, boolean isGroup)
+  public void removePermission(final Project project, final String name, final boolean isGroup)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void updateProjectProperty(Project project, Props props)
+  public void updateProjectProperty(final Project project, final Props props)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public Props fetchProjectProperty(int projectId, int projectVer,
-      String propsName) throws ProjectManagerException {
+  public Props fetchProjectProperty(final int projectId, final int projectVer,
+      final String propsName) throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
   public List<Triple<String, Boolean, Permission>> getProjectPermissions(
-      int projectId) throws ProjectManagerException {
+      final int projectId) throws ProjectManagerException {
     // TODO Auto-generated method stub
     return null;
   }
 
   @Override
-  public void updateProjectSettings(Project project)
+  public void updateProjectSettings(final Project project)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public void updateFlow(Project project, int version, Flow flow)
+  public void updateFlow(final Project project, final int version, final Flow flow)
       throws ProjectManagerException {
     // TODO Auto-generated method stub
 
   }
 
   @Override
-  public Project fetchProjectByName(String name) throws ProjectManagerException {
+  public Project fetchProjectByName(final String name) throws ProjectManagerException {
     System.out.println("MockProjectLoader: fetch project by name " + name);
-    if(!projectsByName.containsKey(name)){
+    if (!this.projectsByName.containsKey(name)) {
       throw new ProjectManagerException("Could not get project by name.");
     }
-    return projectsByName.get(name);
+    return this.projectsByName.get(name);
   }
 }
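
MockProjectLoader above backs createNewProject, fetchProjectById and fetchProjectByName with in-memory maps, so project tests can run without a database. A minimal round-trip sketch using only the methods shown in this file (the directory argument is arbitrary):

// Sketch, assuming a plain JUnit-style test body; no database is involved.
static void mockLoaderRoundTrip() throws ProjectManagerException {
  MockProjectLoader loader = new MockProjectLoader(new java.io.File("unit/project"));
  Project created = loader.createNewProject("myProject", "demo project", new azkaban.user.User("tester"));
  // Both lookups hit the mock's in-memory maps and return the same stored instance.
  Project byId = loader.fetchProjectById(created.getId());
  Project byName = loader.fetchProjectByName("myProject");
  assert byId == byName && byId.isActive();
}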
diff --git a/azkaban-common/src/test/java/azkaban/project/ProjectManagerTest.java b/azkaban-common/src/test/java/azkaban/project/ProjectManagerTest.java
index f4f5adf..ed40d50 100644
--- a/azkaban-common/src/test/java/azkaban/project/ProjectManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/ProjectManagerTest.java
@@ -1,5 +1,15 @@
 package azkaban.project;
 
+import static org.mockito.Mockito.anyCollection;
+import static org.mockito.Mockito.anyList;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import azkaban.storage.StorageManager;
 import azkaban.user.User;
 import azkaban.utils.Props;
@@ -9,15 +19,11 @@ import java.util.List;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
-import static org.mockito.Mockito.*;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
 public class ProjectManagerTest {
-  private ProjectManager manager;
-  private ProjectLoader loader;
-  private StorageManager storageManager;
-  private User user;
+
   private static final String PROJECT_NAME = "myTest";
   private static final String PROJECT_NAME_2 = "myTest_2";
   private static final String PROJECT_DESCRIPTION = "This is to test project manager";
@@ -27,50 +33,57 @@ public class ProjectManagerTest {
   private static final int PROJECT_ID_2 = 2;
   private static final int PROJECT_VERSION = 5;
   private static final int PROJECT_VERSION_RETENTIION = 3;
+  private ProjectManager manager;
+  private ProjectLoader loader;
+  private StorageManager storageManager;
+  private User user;
 
   @Before
   public void setUp() throws Exception {
-    Props props = new Props();
-    loader = mock(ProjectLoader.class);
-    storageManager = mock(StorageManager.class);
-    manager = new ProjectManager(loader, storageManager, props);
-    user = new User(TEST_USER);
-    Project project1 = new Project(PROJECT_ID, PROJECT_NAME);
+    final Props props = new Props();
+    this.loader = mock(ProjectLoader.class);
+    this.storageManager = mock(StorageManager.class);
+    this.manager = new ProjectManager(this.loader, this.storageManager, props);
+    this.user = new User(TEST_USER);
+    final Project project1 = new Project(PROJECT_ID, PROJECT_NAME);
     project1.setDescription(PROJECT_DESCRIPTION);
     project1.setActive(true);
     project1.setVersion(PROJECT_VERSION);
 
-    when(loader.createNewProject(PROJECT_NAME, PROJECT_DESCRIPTION, user)).thenReturn(project1);
-    when(loader.fetchProjectById(PROJECT_ID)).thenReturn(project1);
-    when(loader.fetchProjectByName(PROJECT_NAME)).thenReturn(project1);
-    when(loader.fetchAllProjectFlows(project1)).thenReturn(new ArrayList<>());
-    when(loader.getLatestProjectVersion(project1)).thenReturn(PROJECT_VERSION);
+    when(this.loader.createNewProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user))
+        .thenReturn(project1);
+    when(this.loader.fetchProjectById(PROJECT_ID)).thenReturn(project1);
+    when(this.loader.fetchProjectByName(PROJECT_NAME)).thenReturn(project1);
+    when(this.loader.fetchAllProjectFlows(project1)).thenReturn(new ArrayList<>());
+    when(this.loader.getLatestProjectVersion(project1)).thenReturn(PROJECT_VERSION);
 
     doAnswer(new Answer<Void>() {
       @Override
-      public Void answer(InvocationOnMock invocation) {
+      public Void answer(final InvocationOnMock invocation) {
         project1.setActive(false);
         return null;
       }
-    }).when(loader).removeProject(project1, user.getUserId());
+    }).when(this.loader).removeProject(project1, this.user.getUserId());
 
     doAnswer(new Answer<Void>() {
       @Override
-      public Void answer(InvocationOnMock invocation) {
+      public Void answer(final InvocationOnMock invocation) {
         project1.setVersion(PROJECT_VERSION + 1);
         return null;
       }
-    }).when(loader).changeProjectVersion(project1, PROJECT_VERSION + 1, user.getUserId());
+    }).when(this.loader).changeProjectVersion(project1, PROJECT_VERSION + 1, this.user.getUserId());
 
-    doThrow(ProjectManagerException.class).when(loader).fetchAllProjectFlows(null);
+    doThrow(ProjectManagerException.class).when(this.loader).fetchAllProjectFlows(null);
 
   }
 
   @Test
   public void testCreateProject() throws Exception {
     System.out.println("TestCreateProject");
-    Project project = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    verify(loader).postEvent(project, ProjectLogEvent.EventType.CREATED, user.getUserId(), null);
+    final Project project = this.manager
+        .createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    verify(this.loader)
+        .postEvent(project, ProjectLogEvent.EventType.CREATED, this.user.getUserId(), null);
     Assert.assertEquals("Project Id", PROJECT_ID, project.getId());
     Assert.assertEquals("Project name", PROJECT_NAME, project.getName());
     Assert.assertEquals("Project description", PROJECT_DESCRIPTION,
@@ -81,108 +94,118 @@ public class ProjectManagerTest {
   @Test(expected = ProjectManagerException.class)
   public void testCreateProjectWithEmptyName() throws Exception {
     System.out.println("TestCreateProjectWithEmptyName");
-    manager.createProject(null, PROJECT_DESCRIPTION, user);
+    this.manager.createProject(null, PROJECT_DESCRIPTION, this.user);
   }
 
   @Test(expected = ProjectManagerException.class)
   public void testCreateProjectWithInvalidName() throws Exception {
     System.out.println("TestCreateProjectWithInvalidName");
     //Project name must start with a letter, test invalid project name "123", should throw exception
-    manager.createProject("123", PROJECT_DESCRIPTION, user);
+    this.manager.createProject("123", PROJECT_DESCRIPTION, this.user);
   }
 
   @Test(expected = ProjectManagerException.class)
   public void testCreateProjectWithEmptyDescription() throws Exception {
     System.out.println("testCreateProjectWithEmptyDescription");
-    manager.createProject(PROJECT_NAME, null, user);
+    this.manager.createProject(PROJECT_NAME, null, this.user);
   }
 
   @Test(expected = ProjectManagerException.class)
   public void testCreateProjectWithEmptyUser() throws Exception {
     System.out.println("testCreateProjectWithEmptyUser");
-    manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, null);
+    this.manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, null);
   }
 
   @Test
   public void testRemoveProject() throws Exception {
     System.out.println("TestRemoveProject");
-    Project project = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    manager.removeProject(project, user);
-    verify(loader).removeProject(project, user.getUserId());
-    verify(loader).postEvent(project, ProjectLogEvent.EventType.DELETED, user.getUserId(),
+    final Project project = this.manager
+        .createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    this.manager.removeProject(project, this.user);
+    verify(this.loader).removeProject(project, this.user.getUserId());
+    verify(this.loader).postEvent(project, ProjectLogEvent.EventType.DELETED, this.user.getUserId(),
         null);
-    Project fetchedProject = manager.getProject(project.getId());
-    verify(loader).fetchProjectById(project.getId());
-    verify(loader).fetchAllProjectFlows(project);
+    final Project fetchedProject = this.manager.getProject(project.getId());
+    verify(this.loader).fetchProjectById(project.getId());
+    verify(this.loader).fetchAllProjectFlows(project);
     Assert.assertFalse(fetchedProject.isActive());
   }
 
   @Test
   public void testUploadProject() throws Exception {
     System.out.println("TestUploadProject");
-    Project project = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    File testFile = new File(this.getClass().getClassLoader().getResource("project/testjob/testjob.zip").getFile());
+    final Project project = this.manager
+        .createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    final File testFile = new File(
+        this.getClass().getClassLoader().getResource("project/testjob/testjob.zip").getFile());
     System.out.println("Uploading zip file: " + testFile.getAbsolutePath());
-    Props props = new Props();
-    manager.uploadProject(project, testFile, FILE_TYPE, user, props);
+    final Props props = new Props();
+    this.manager.uploadProject(project, testFile, FILE_TYPE, this.user, props);
 
-    verify(storageManager).uploadProject(project, PROJECT_VERSION + 1, testFile, user);
+    verify(this.storageManager).uploadProject(project, PROJECT_VERSION + 1, testFile, this.user);
 
-    verify(loader).uploadFlows(eq(project), eq(PROJECT_VERSION + 1), anyCollection());
-    verify(loader).changeProjectVersion(project, PROJECT_VERSION + 1, user.getUserId());
+    verify(this.loader).uploadFlows(eq(project), eq(PROJECT_VERSION + 1), anyCollection());
+    verify(this.loader).changeProjectVersion(project, PROJECT_VERSION + 1, this.user.getUserId());
     //uploadProjectProperties should be called twice, one for jobProps, the other for propProps
-    verify(loader, times(2)).uploadProjectProperties(eq(project), anyList());
-    verify(loader).postEvent(project, ProjectLogEvent.EventType.UPLOADED, user.getUserId(),
-        "Uploaded project files zip " + testFile.getName());
-    verify(loader).cleanOlderProjectVersion(project.getId(), PROJECT_VERSION + 1 - PROJECT_VERSION_RETENTIION);
+    verify(this.loader, times(2)).uploadProjectProperties(eq(project), anyList());
+    verify(this.loader)
+        .postEvent(project, ProjectLogEvent.EventType.UPLOADED, this.user.getUserId(),
+            "Uploaded project files zip " + testFile.getName());
+    verify(this.loader).cleanOlderProjectVersion(project.getId(),
+        PROJECT_VERSION + 1 - PROJECT_VERSION_RETENTIION);
   }
 
   @Test
   public void testFetchProjectByName() throws Exception {
     System.out.println("TestFetchProjectByName");
-    Project project = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    Project fetchedProject = manager.getProject(project.getName());
+    final Project project = this.manager
+        .createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    final Project fetchedProject = this.manager.getProject(project.getName());
     Assert.assertEquals("Fetched project by name", project, fetchedProject);
   }
 
   @Test(expected = RuntimeException.class)
   public void testFetchInvalidProjectByName() throws Exception {
     System.out.println("TestFetchInvalidProjectByName");
-    manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    manager.getProject("Invalid_Project");
+    this.manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    this.manager.getProject("Invalid_Project");
   }
 
   @Test
   public void testFetchProjectById() throws Exception {
     System.out.println("TestFetchProjectById");
-    Project project = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    Project fetchedProject = manager.getProject(project.getId());
+    final Project project = this.manager
+        .createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    final Project fetchedProject = this.manager.getProject(project.getId());
     Assert.assertEquals("Fetched project by id", project, fetchedProject);
   }
 
   @Test(expected = RuntimeException.class)
   public void testFetchInvalidProjectById() throws Exception {
     System.out.println("TestFetchInvalidProjectById");
-    manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    manager.getProject(100);
+    this.manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, this.user);
+    this.manager.getProject(100);
   }
 
   @Test
   public void testFetchAllProjects() throws Exception {
     System.out.println("TestFetchAllProjects");
-    List<Project> projects = new ArrayList<>();
-    Project new_project1 = manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION, user);
-    Project project2 = new Project(PROJECT_ID_2, PROJECT_NAME_2);
+    final List<Project> projects = new ArrayList<>();
+    final Project new_project1 = this.manager.createProject(PROJECT_NAME, PROJECT_DESCRIPTION,
+        this.user);
+    final Project project2 = new Project(PROJECT_ID_2, PROJECT_NAME_2);
     project2.setDescription(PROJECT_DESCRIPTION);
     project2.setActive(true);
     project2.setVersion(PROJECT_VERSION);
-    when(loader.createNewProject(PROJECT_NAME_2, PROJECT_DESCRIPTION, user)).thenReturn(project2);
-    Project new_project2 = manager.createProject(PROJECT_NAME_2, PROJECT_DESCRIPTION, user);
+    when(this.loader.createNewProject(PROJECT_NAME_2, PROJECT_DESCRIPTION, this.user))
+        .thenReturn(project2);
+    final Project new_project2 = this.manager
+        .createProject(PROJECT_NAME_2, PROJECT_DESCRIPTION, this.user);
     projects.add(new_project1);
     projects.add(new_project2);
 
-    when(loader.fetchAllActiveProjects()).thenReturn(projects);
-    List<Project> fetchedProjects = manager.getProjects();
+    when(this.loader.fetchAllActiveProjects()).thenReturn(projects);
+    final List<Project> fetchedProjects = this.manager.getProjects();
     Assert.assertEquals("Fetched projects: ", projects, fetchedProjects);
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/project/ProjectSpecLoader.java b/azkaban-common/src/test/java/azkaban/project/ProjectSpecLoader.java
index 6fb9d25..d0b41e3 100644
--- a/azkaban-common/src/test/java/azkaban/project/ProjectSpecLoader.java
+++ b/azkaban-common/src/test/java/azkaban/project/ProjectSpecLoader.java
@@ -25,7 +25,7 @@ import org.yaml.snakeyaml.Yaml;
 
 public class ProjectSpecLoader {
 
-  public ProjectSpec load(File projectSpecFile) throws FileNotFoundException {
+  public ProjectSpec load(final File projectSpecFile) throws FileNotFoundException {
     return new Yaml().loadAs(new FileInputStream(projectSpecFile), ProjectSpec.class);
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/project/ProjectSpecTest.java b/azkaban-common/src/test/java/azkaban/project/ProjectSpecTest.java
index a6a2c58..354292b 100644
--- a/azkaban-common/src/test/java/azkaban/project/ProjectSpecTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/ProjectSpecTest.java
@@ -17,32 +17,30 @@
 
 package azkaban.project;
 
+import static org.junit.Assert.assertEquals;
+
 import java.io.File;
-import java.io.FileInputStream;
 import java.net.URI;
 import java.util.Map;
 import org.junit.Test;
-import org.yaml.snakeyaml.Yaml;
-
-import static org.junit.Assert.*;
 
 
 public class ProjectSpecTest {
 
   /**
    * Loads spec.yaml from test/resources and asserts properties
-   *
    */
   @Test
   public void testSpecLoad() throws Exception {
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource("spec.yml").getFile());
-    ProjectSpec spec = new ProjectSpecLoader().load(file);
+    final ClassLoader classLoader = getClass().getClassLoader();
+    final File file = new File(classLoader.getResource("spec.yml").getFile());
+    final ProjectSpec spec = new ProjectSpecLoader().load(file);
 
     assertEquals("1.0", spec.getVersion());
 
-    Map<String, URI> fetchMap = spec.getPreExec().getFetch();
-    URI sampleUri = new URI("http://central.maven.org/maven2/log4j/log4j/1.2.17/log4j-1.2.17.jar");
+    final Map<String, URI> fetchMap = spec.getPreExec().getFetch();
+    final URI sampleUri = new URI(
+        "http://central.maven.org/maven2/log4j/log4j/1.2.17/log4j-1.2.17.jar");
     assertEquals(sampleUri, fetchMap.get("lib"));
     assertEquals(sampleUri, fetchMap.get("path/to/foo"));
   }
diff --git a/azkaban-common/src/test/java/azkaban/project/ProjectTest.java b/azkaban-common/src/test/java/azkaban/project/ProjectTest.java
index 63c19f0..5959f1d 100644
--- a/azkaban-common/src/test/java/azkaban/project/ProjectTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/ProjectTest.java
@@ -16,30 +16,30 @@
 
 package azkaban.project;
 
-import org.junit.Test;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import azkaban.user.Permission;
 import azkaban.user.Permission.Type;
 import azkaban.utils.JSONUtils;
+import org.junit.Test;
 
 public class ProjectTest {
+
   @Test
   public void testToAndFromObject() throws Exception {
-    Project project = new Project(1, "tesTing");
+    final Project project = new Project(1, "tesTing");
     project.setCreateTimestamp(1L);
     project.setLastModifiedTimestamp(2L);
     project.setDescription("I am a test");
-    project.setUserPermission("user1", new Permission(new Type[] { Type.ADMIN,
-        Type.EXECUTE }));
+    project.setUserPermission("user1", new Permission(new Type[]{Type.ADMIN,
+        Type.EXECUTE}));
 
-    Object obj = project.toObject();
-    String json = JSONUtils.toJSON(obj);
+    final Object obj = project.toObject();
+    final String json = JSONUtils.toJSON(obj);
 
-    Object jsonObj = JSONUtils.parseJSONFromString(json);
+    final Object jsonObj = JSONUtils.parseJSONFromString(json);
 
-    Project parsedProject = Project.projectFromObject(jsonObj);
+    final Project parsedProject = Project.projectFromObject(jsonObj);
 
     assertTrue(project.equals(parsedProject));
   }
diff --git a/azkaban-common/src/test/java/azkaban/project/validator/ValidationReportTest.java b/azkaban-common/src/test/java/azkaban/project/validator/ValidationReportTest.java
index 4b4bb18..1ecfeea 100644
--- a/azkaban-common/src/test/java/azkaban/project/validator/ValidationReportTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/validator/ValidationReportTest.java
@@ -1,10 +1,9 @@
 package azkaban.project.validator;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.util.HashSet;
 import java.util.Set;
-
 import org.junit.Test;
 
 /**
@@ -14,10 +13,10 @@ public class ValidationReportTest {
 
   @Test
   public void testAddWarnLevelInfoMsg() {
-    ValidationReport report = new ValidationReport();
-    String msg = "test warn level info message.";
+    final ValidationReport report = new ValidationReport();
+    final String msg = "test warn level info message.";
     report.addWarnLevelInfoMsg(msg);
-    for (String info : report.getInfoMsgs()) {
+    for (final String info : report.getInfoMsgs()) {
       assertEquals("Info message added through addWarnLevelInfoMsg should have level set to WARN",
           ValidationReport.getInfoMsgLevel(info), ValidationStatus.WARN);
       assertEquals("Retrieved info message does not match the original one.",
@@ -27,10 +26,10 @@ public class ValidationReportTest {
 
   @Test
   public void testAddErrorLevelInfoMsg() {
-    ValidationReport report = new ValidationReport();
-    String msg = "test error level error message.";
+    final ValidationReport report = new ValidationReport();
+    final String msg = "test error level error message.";
     report.addErrorLevelInfoMsg(msg);
-    for (String info : report.getInfoMsgs()) {
+    for (final String info : report.getInfoMsgs()) {
       assertEquals("Info message added through addErrorLevelInfoMsg should have level set to ERROR",
           ValidationReport.getInfoMsgLevel(info), ValidationStatus.ERROR);
       assertEquals("Retrieved info message does not match the original one.",
@@ -40,8 +39,8 @@ public class ValidationReportTest {
 
   @Test
   public void testAddMsgs() {
-    ValidationReport report = new ValidationReport();
-    Set<String> msgs = new HashSet<String>();
+    final ValidationReport report = new ValidationReport();
+    final Set<String> msgs = new HashSet<>();
     msgs.add("test msg 1.");
     msgs.add("test msg 2.");
     report.addWarningMsgs(msgs);
diff --git a/azkaban-common/src/test/java/azkaban/project/validator/XmlValidatorManagerTest.java b/azkaban-common/src/test/java/azkaban/project/validator/XmlValidatorManagerTest.java
index c1e1ab0..fc1da74 100644
--- a/azkaban-common/src/test/java/azkaban/project/validator/XmlValidatorManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/project/validator/XmlValidatorManagerTest.java
@@ -1,17 +1,15 @@
 package azkaban.project.validator;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
+import azkaban.utils.Props;
+import com.google.common.io.Resources;
 import java.net.URL;
-
 import org.junit.Test;
 
-import com.google.common.io.Resources;
-
-import azkaban.utils.Props;
-
 public class XmlValidatorManagerTest {
-  private Props baseProps = new Props();
+
+  private final Props baseProps = new Props();
 
   /**
    * Test that if the validator directory does not exist, XmlValidatorManager
@@ -19,13 +17,16 @@ public class XmlValidatorManagerTest {
    */
   @Test
   public void testNoValidatorsDir() {
-    Props props = new Props(baseProps);
-
-    XmlValidatorManager manager = new XmlValidatorManager(props);
-    assertEquals("XmlValidatorManager should contain only the default validator when no xml configuration "
-        + "file is present.", manager.getValidatorsInfo().size(), 1);
-    assertEquals("XmlValidatorManager should contain only the default validator when no xml configuration "
-        + "file is present.", manager.getValidatorsInfo().get(0), XmlValidatorManager.DEFAULT_VALIDATOR_KEY);
+    final Props props = new Props(this.baseProps);
+
+    final XmlValidatorManager manager = new XmlValidatorManager(props);
+    assertEquals(
+        "XmlValidatorManager should contain only the default validator when no xml configuration "
+            + "file is present.", manager.getValidatorsInfo().size(), 1);
+    assertEquals(
+        "XmlValidatorManager should contain only the default validator when no xml configuration "
+            + "file is present.", manager.getValidatorsInfo().get(0),
+        XmlValidatorManager.DEFAULT_VALIDATOR_KEY);
   }
 
   /**
@@ -34,26 +35,29 @@ public class XmlValidatorManagerTest {
    */
   @Test
   public void testDefaultValidator() {
-    Props props = new Props(baseProps);
-    URL validatorUrl = Resources.getResource("project/testValidators");
+    final Props props = new Props(this.baseProps);
+    final URL validatorUrl = Resources.getResource("project/testValidators");
     props.put(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, validatorUrl.getPath());
 
-    XmlValidatorManager manager = new XmlValidatorManager(props);
-    assertEquals("XmlValidatorManager should contain only the default validator when no xml configuration "
-        + "file is present.", manager.getValidatorsInfo().size(), 1);
-    assertEquals("XmlValidatorManager should contain only the default validator when no xml configuration "
-        + "file is present.", manager.getValidatorsInfo().get(0), XmlValidatorManager.DEFAULT_VALIDATOR_KEY);
+    final XmlValidatorManager manager = new XmlValidatorManager(props);
+    assertEquals(
+        "XmlValidatorManager should contain only the default validator when no xml configuration "
+            + "file is present.", manager.getValidatorsInfo().size(), 1);
+    assertEquals(
+        "XmlValidatorManager should contain only the default validator when no xml configuration "
+            + "file is present.", manager.getValidatorsInfo().get(0),
+        XmlValidatorManager.DEFAULT_VALIDATOR_KEY);
   }
 
   /**
    * Test that if the xml config file specifies a validator classname that does not exist,
    * XmlValidatorManager should throw an exception.
    */
-  @Test(expected=ValidatorManagerException.class)
+  @Test(expected = ValidatorManagerException.class)
   public void testValidatorDoesNotExist() {
-    Props props = new Props(baseProps);
-    URL validatorUrl = Resources.getResource("project/testValidators");
-    URL configUrl = Resources.getResource("test-conf/azkaban-validators-test1.xml");
+    final Props props = new Props(this.baseProps);
+    final URL validatorUrl = Resources.getResource("project/testValidators");
+    final URL configUrl = Resources.getResource("test-conf/azkaban-validators-test1.xml");
     props.put(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, validatorUrl.getPath());
     props.put(ValidatorConfigs.XML_FILE_PARAM, configUrl.getPath());
 
@@ -64,19 +68,22 @@ public class XmlValidatorManagerTest {
   /**
    * Test that if the xml config file is properly set, XmlValidatorManager loads both the default
    * validator and the one specified in the xml file. The TestValidator class specified in the xml
-   * configuration file is located with the jar file inside test resource directory project/testValidators.
+   * configuration file is located with the jar file inside test resource directory
+   * project/testValidators.
    */
   @Test
   public void testLoadValidators() {
-    Props props = new Props(baseProps);
-    URL validatorUrl = Resources.getResource("project/testValidators");
-    URL configUrl = Resources.getResource("test-conf/azkaban-validators-test2.xml");
+    final Props props = new Props(this.baseProps);
+    final URL validatorUrl = Resources.getResource("project/testValidators");
+    final URL configUrl = Resources.getResource("test-conf/azkaban-validators-test2.xml");
     props.put(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, validatorUrl.getPath());
     props.put(ValidatorConfigs.XML_FILE_PARAM, configUrl.getPath());
 
-    XmlValidatorManager manager = new XmlValidatorManager(props);
-    assertEquals("XmlValidatorManager should contain 2 validators.", manager.getValidatorsInfo().size(), 2);
-    assertEquals("XmlValidatorManager should contain the validator specified in the xml configuration file.",
+    final XmlValidatorManager manager = new XmlValidatorManager(props);
+    assertEquals("XmlValidatorManager should contain 2 validators.",
+        manager.getValidatorsInfo().size(), 2);
+    assertEquals(
+        "XmlValidatorManager should contain the validator specified in the xml configuration file.",
         manager.getValidatorsInfo().get(1), "Test");
   }
 
diff --git a/azkaban-common/src/test/java/azkaban/server/HttpRequestUtilsTest.java b/azkaban-common/src/test/java/azkaban/server/HttpRequestUtilsTest.java
index 24f6ef7..b92bfdd 100644
--- a/azkaban-common/src/test/java/azkaban/server/HttpRequestUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/server/HttpRequestUtilsTest.java
@@ -16,13 +16,6 @@
 
 package azkaban.server;
 
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.junit.Assert;
-import org.junit.Test;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutorManagerException;
@@ -31,59 +24,65 @@ import azkaban.user.User;
 import azkaban.user.UserManager;
 import azkaban.user.UserManagerException;
 import azkaban.utils.TestUtils;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Assert;
+import org.junit.Test;
 
 /**
  * Test class for HttpRequestUtils
  */
 public final class HttpRequestUtilsTest {
+
   /* Helper method to get a test flow and add required properties */
   public static ExecutableFlow createExecutableFlow() throws IOException {
-    ExecutableFlow flow = TestUtils.createExecutableFlow("exectest1", "exec1");
+    final ExecutableFlow flow = TestUtils.createExecutableFlow("exectest1", "exec1");
     flow.getExecutionOptions().getFlowParameters()
-      .put(ExecutionOptions.FLOW_PRIORITY, "1");
+        .put(ExecutionOptions.FLOW_PRIORITY, "1");
     flow.getExecutionOptions().getFlowParameters()
-      .put(ExecutionOptions.USE_EXECUTOR, "2");
+        .put(ExecutionOptions.USE_EXECUTOR, "2");
     return flow;
   }
 
   /* Test that flow properties are removed for non-admin user */
   @Test
   public void TestFilterNonAdminOnlyFlowParams() throws IOException,
-    ExecutorManagerException, UserManagerException {
-    ExecutableFlow flow = createExecutableFlow();
-    UserManager manager = TestUtils.createTestXmlUserManager();
-    User user = manager.getUser("testUser", "testUser");
+      ExecutorManagerException, UserManagerException {
+    final ExecutableFlow flow = createExecutableFlow();
+    final UserManager manager = TestUtils.createTestXmlUserManager();
+    final User user = manager.getUser("testUser", "testUser");
 
     HttpRequestUtils.filterAdminOnlyFlowParams(manager,
-      flow.getExecutionOptions(), user);
+        flow.getExecutionOptions(), user);
 
     Assert.assertFalse(flow.getExecutionOptions().getFlowParameters()
-      .containsKey(ExecutionOptions.FLOW_PRIORITY));
+        .containsKey(ExecutionOptions.FLOW_PRIORITY));
     Assert.assertFalse(flow.getExecutionOptions().getFlowParameters()
-      .containsKey(ExecutionOptions.USE_EXECUTOR));
+        .containsKey(ExecutionOptions.USE_EXECUTOR));
   }
 
   /* Test that flow properties are retained for admin user */
   @Test
   public void TestFilterAdminOnlyFlowParams() throws IOException,
-    ExecutorManagerException, UserManagerException {
-    ExecutableFlow flow = createExecutableFlow();
-    UserManager manager = TestUtils.createTestXmlUserManager();
-    User user = manager.getUser("testAdmin", "testAdmin");
+      ExecutorManagerException, UserManagerException {
+    final ExecutableFlow flow = createExecutableFlow();
+    final UserManager manager = TestUtils.createTestXmlUserManager();
+    final User user = manager.getUser("testAdmin", "testAdmin");
 
     HttpRequestUtils.filterAdminOnlyFlowParams(manager,
-      flow.getExecutionOptions(), user);
+        flow.getExecutionOptions(), user);
 
     Assert.assertTrue(flow.getExecutionOptions().getFlowParameters()
-      .containsKey(ExecutionOptions.FLOW_PRIORITY));
+        .containsKey(ExecutionOptions.FLOW_PRIORITY));
     Assert.assertTrue(flow.getExecutionOptions().getFlowParameters()
-      .containsKey(ExecutionOptions.USE_EXECUTOR));
+        .containsKey(ExecutionOptions.USE_EXECUTOR));
   }
 
   /* Test exception, if param is a valid integer */
   @Test
   public void testvalidIntegerParam() throws ExecutorManagerException {
-    Map<String, String> params = new HashMap<String, String>();
+    final Map<String, String> params = new HashMap<>();
     params.put("param1", "123");
     HttpRequestUtils.validateIntegerParam(params, "param1");
   }
@@ -91,7 +90,7 @@ public final class HttpRequestUtilsTest {
   /* Test exception, if param is not a valid integer */
   @Test(expected = ExecutorManagerException.class)
   public void testInvalidIntegerParam() throws ExecutorManagerException {
-    Map<String, String> params = new HashMap<String, String>();
+    final Map<String, String> params = new HashMap<>();
     params.put("param1", "1dff2");
     HttpRequestUtils.validateIntegerParam(params, "param1");
   }
@@ -99,18 +98,18 @@ public final class HttpRequestUtilsTest {
   /* Verify permission for admin user */
   @Test
   public void testHasAdminPermission() throws UserManagerException {
-    UserManager manager = TestUtils.createTestXmlUserManager();
-    User adminUser = manager.getUser("testAdmin", "testAdmin");
+    final UserManager manager = TestUtils.createTestXmlUserManager();
+    final User adminUser = manager.getUser("testAdmin", "testAdmin");
     Assert.assertTrue(HttpRequestUtils.hasPermission(manager, adminUser,
-      Type.ADMIN));
+        Type.ADMIN));
   }
 
   /* verify permission for non-admin user */
   @Test
   public void testHasOrdinaryPermission() throws UserManagerException {
-    UserManager manager = TestUtils.createTestXmlUserManager();
-    User testUser = manager.getUser("testUser", "testUser");
+    final UserManager manager = TestUtils.createTestXmlUserManager();
+    final User testUser = manager.getUser("testUser", "testUser");
     Assert.assertFalse(HttpRequestUtils.hasPermission(manager, testUser,
-      Type.ADMIN));
+        Type.ADMIN));
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/ServiceProviderTest.java b/azkaban-common/src/test/java/azkaban/ServiceProviderTest.java
index 87a5998..a44d31b 100644
--- a/azkaban-common/src/test/java/azkaban/ServiceProviderTest.java
+++ b/azkaban-common/src/test/java/azkaban/ServiceProviderTest.java
@@ -17,6 +17,9 @@
 
 package azkaban;
 
+import static azkaban.ServiceProvider.SERVICE_PROVIDER;
+import static org.junit.Assert.assertNotNull;
+
 import azkaban.db.DatabaseOperator;
 import azkaban.project.JdbcProjectLoader;
 import azkaban.spi.Storage;
@@ -31,9 +34,6 @@ import org.apache.commons.io.FileUtils;
 import org.junit.After;
 import org.junit.Test;
 
-import static azkaban.ServiceProvider.*;
-import static org.junit.Assert.*;
-
 
 public class ServiceProviderTest {
 
@@ -46,13 +46,13 @@ public class ServiceProviderTest {
 
   @Test
   public void testInjections() throws Exception {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("database.type", "h2");
     props.put("h2.path", "h2");
-    props.put(Constants.ConfigurationKeys.AZKABAN_STORAGE_LOCAL_BASEDIR, AZKABAN_LOCAL_TEST_STORAGE);
-
+    props
+        .put(Constants.ConfigurationKeys.AZKABAN_STORAGE_LOCAL_BASEDIR, AZKABAN_LOCAL_TEST_STORAGE);
 
-    Injector injector = Guice.createInjector(
+    final Injector injector = Guice.createInjector(
         new AzkabanCommonModule(props)
     );
     SERVICE_PROVIDER.unsetInjector();
diff --git a/azkaban-common/src/test/java/azkaban/storage/DatabaseStorageTest.java b/azkaban-common/src/test/java/azkaban/storage/DatabaseStorageTest.java
index 69b51b9..08bcde4 100644
--- a/azkaban-common/src/test/java/azkaban/storage/DatabaseStorageTest.java
+++ b/azkaban-common/src/test/java/azkaban/storage/DatabaseStorageTest.java
@@ -17,26 +17,28 @@
 
 package azkaban.storage;
 
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+
 import azkaban.project.ProjectLoader;
 import azkaban.spi.StorageMetadata;
 import java.io.File;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
-
 
 public class DatabaseStorageTest {
+
   private final ProjectLoader projectLoader = mock(ProjectLoader.class);
-  private final DatabaseStorage databaseStorage = new DatabaseStorage(projectLoader);
+  private final DatabaseStorage databaseStorage = new DatabaseStorage(this.projectLoader);
 
   @Test
   public void testPut() throws Exception {
     final File file = mock(File.class);
-    int projectId = 1234;
-    int version = 1;
-    String uploader = "testuser";
+    final int projectId = 1234;
+    final int version = 1;
+    final String uploader = "testuser";
     final StorageMetadata metadata = new StorageMetadata(projectId, version, uploader, null);
-    databaseStorage.put(metadata, file);
-    verify(projectLoader).uploadProjectFile(projectId, version, file, uploader);
+    this.databaseStorage.put(metadata, file);
+    verify(this.projectLoader).uploadProjectFile(projectId, version, file, uploader);
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/storage/HdfsStorageTest.java b/azkaban-common/src/test/java/azkaban/storage/HdfsStorageTest.java
index a5dc030..99297c3 100644
--- a/azkaban-common/src/test/java/azkaban/storage/HdfsStorageTest.java
+++ b/azkaban-common/src/test/java/azkaban/storage/HdfsStorageTest.java
@@ -17,6 +17,11 @@
 
 package azkaban.storage;
 
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 import azkaban.AzkabanCommonModuleConfig;
 import azkaban.spi.StorageMetadata;
 import azkaban.utils.Md5Hasher;
@@ -29,44 +34,44 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
-
 
 public class HdfsStorageTest {
+
   private HdfsAuth hdfsAuth;
   private HdfsStorage hdfsStorage;
   private FileSystem hdfs;
 
   @Before
   public void setUp() throws Exception {
-    hdfs = mock(FileSystem.class);
-    hdfsAuth = mock(HdfsAuth.class);
-    AzkabanCommonModuleConfig config = mock(AzkabanCommonModuleConfig.class);
+    this.hdfs = mock(FileSystem.class);
+    this.hdfsAuth = mock(HdfsAuth.class);
+    final AzkabanCommonModuleConfig config = mock(AzkabanCommonModuleConfig.class);
     when(config.getHdfsRootUri()).thenReturn(URI.create("hdfs://localhost:9000/path/to/foo"));
 
-    hdfsStorage = new HdfsStorage(hdfsAuth, hdfs, config);
+    this.hdfsStorage = new HdfsStorage(this.hdfsAuth, this.hdfs, config);
   }
 
   @Test
   public void testGet() throws Exception {
-    hdfsStorage.get("1/1-hash.zip");
-    verify(hdfs).open(new Path("hdfs://localhost:9000/path/to/foo/1/1-hash.zip"));
+    this.hdfsStorage.get("1/1-hash.zip");
+    verify(this.hdfs).open(new Path("hdfs://localhost:9000/path/to/foo/1/1-hash.zip"));
   }
 
   @Test
   public void testPut() throws Exception {
-    File file = new File(getClass().getClassLoader().getResource("sample_flow_01.zip").getFile());
+    final File file = new File(
+        getClass().getClassLoader().getResource("sample_flow_01.zip").getFile());
     final String hash = new String(Hex.encodeHex(Md5Hasher.md5Hash(file)));
 
-    when(hdfs.exists(any(Path.class))).thenReturn(false);
+    when(this.hdfs.exists(any(Path.class))).thenReturn(false);
 
-    StorageMetadata metadata = new StorageMetadata(1, 2, "uploader", Md5Hasher.md5Hash(file));
-    String key = hdfsStorage.put(metadata, file);
+    final StorageMetadata metadata = new StorageMetadata(1, 2, "uploader", Md5Hasher.md5Hash(file));
+    final String key = this.hdfsStorage.put(metadata, file);
 
     final String expectedName = String.format("1/1-%s.zip", hash);
     Assert.assertEquals(expectedName, key);
 
     final String expectedPath = "/path/to/foo/" + expectedName;
-    verify(hdfs).copyFromLocalFile(new Path(file.getAbsolutePath()), new Path(expectedPath));
+    verify(this.hdfs).copyFromLocalFile(new Path(file.getAbsolutePath()), new Path(expectedPath));
   }
 }
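
HdfsStorageTest above expects storage keys of the form <projectId>/<projectId>-<md5 hex>.zip. A self-contained sketch of deriving such a key with only the JDK follows; the storageKey helper is hypothetical and shown purely for illustration, since the project's own hashing lives in Md5Hasher and Hex.encodeHex.

import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import java.security.MessageDigest;

public class StorageKeySketch {

  // Hypothetical helper: md5-hash a file and build a "<projectId>/<projectId>-<hash>.zip" key.
  static String storageKey(final int projectId, final File file) throws Exception {
    final MessageDigest md5 = MessageDigest.getInstance("MD5");
    try (InputStream in = Files.newInputStream(file.toPath())) {
      final byte[] buf = new byte[8192];
      int n;
      while ((n = in.read(buf)) != -1) {
        md5.update(buf, 0, n);
      }
    }
    final StringBuilder hex = new StringBuilder();
    for (final byte b : md5.digest()) {
      hex.append(String.format("%02x", b));
    }
    return String.format("%d/%d-%s.zip", projectId, projectId, hex);
  }

  public static void main(String[] args) throws Exception {
    final File sample = File.createTempFile("sample_flow", ".zip");
    sample.deleteOnExit();
    // Empty file: 1/1-d41d8cd98f00b204e9800998ecf8427e.zip
    System.out.println(storageKey(1, sample));
  }
}
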
diff --git a/azkaban-common/src/test/java/azkaban/storage/LocalStorageTest.java b/azkaban-common/src/test/java/azkaban/storage/LocalStorageTest.java
index d7b098a..4e52d6c 100644
--- a/azkaban-common/src/test/java/azkaban/storage/LocalStorageTest.java
+++ b/azkaban-common/src/test/java/azkaban/storage/LocalStorageTest.java
@@ -17,6 +17,11 @@
 
 package azkaban.storage;
 
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import azkaban.AzkabanCommonModuleConfig;
 import azkaban.spi.StorageMetadata;
 import azkaban.utils.Md5Hasher;
@@ -28,26 +33,22 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
-
 
 public class LocalStorageTest {
-  private static final Logger log = Logger.getLogger(LocalStorageTest.class);
 
   static final String SAMPLE_FILE = "sample_flow_01.zip";
   static final String LOCAL_STORAGE = "LOCAL_STORAGE";
   static final File BASE_DIRECTORY = new File(LOCAL_STORAGE);
-
+  private static final Logger log = Logger.getLogger(LocalStorageTest.class);
   private LocalStorage localStorage;
 
   @Before
   public void setUp() throws Exception {
     tearDown();
     BASE_DIRECTORY.mkdir();
-    AzkabanCommonModuleConfig config = mock(AzkabanCommonModuleConfig.class);
+    final AzkabanCommonModuleConfig config = mock(AzkabanCommonModuleConfig.class);
     when(config.getLocalStorageBaseDirPath()).thenReturn(LOCAL_STORAGE);
-    localStorage = new LocalStorage(config);
+    this.localStorage = new LocalStorage(config);
   }
 
   @After
@@ -57,16 +58,16 @@ public class LocalStorageTest {
 
   @Test
   public void testPutGet() throws Exception {
-    ClassLoader classLoader = getClass().getClassLoader();
-    File testFile = new File(classLoader.getResource(SAMPLE_FILE).getFile());
+    final ClassLoader classLoader = getClass().getClassLoader();
+    final File testFile = new File(classLoader.getResource(SAMPLE_FILE).getFile());
 
     final StorageMetadata metadata = new StorageMetadata(
         1, 1, "testuser", Md5Hasher.md5Hash(testFile));
-    final String key = localStorage.put(metadata, testFile);
+    final String key = this.localStorage.put(metadata, testFile);
     assertNotNull(key);
     log.info("Key URI: " + key);
 
-    File expectedTargetFile = new File(BASE_DIRECTORY, new StringBuilder()
+    final File expectedTargetFile = new File(BASE_DIRECTORY, new StringBuilder()
         .append(metadata.getProjectId())
         .append(File.separator)
         .append(metadata.getProjectId())
@@ -79,9 +80,9 @@ public class LocalStorageTest {
     assertTrue(FileUtils.contentEquals(testFile, expectedTargetFile));
 
     // test get
-    InputStream getIs = localStorage.get(key);
+    final InputStream getIs = this.localStorage.get(key);
     assertNotNull(getIs);
-    File getFile = new File("tmp.get");
+    final File getFile = new File("tmp.get");
     FileUtils.copyInputStreamToFile(getIs, getFile);
     assertTrue(FileUtils.contentEquals(testFile, getFile));
     getFile.delete();
diff --git a/azkaban-common/src/test/java/azkaban/test/Utils.java b/azkaban-common/src/test/java/azkaban/test/Utils.java
index e7b2c78..c8e1699 100644
--- a/azkaban-common/src/test/java/azkaban/test/Utils.java
+++ b/azkaban-common/src/test/java/azkaban/test/Utils.java
@@ -1,15 +1,16 @@
 package azkaban.test;
 
+import static azkaban.ServiceProvider.SERVICE_PROVIDER;
+
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 
-import static azkaban.ServiceProvider.*;
-
 
 public class Utils {
+
   public static void initServiceProvider() {
-    Injector injector = Guice.createInjector(new AbstractModule() {
+    final Injector injector = Guice.createInjector(new AbstractModule() {
       @Override
       protected void configure() {
       }
diff --git a/azkaban-common/src/test/java/azkaban/trigger/BasicTimeCheckerTest.java b/azkaban-common/src/test/java/azkaban/trigger/BasicTimeCheckerTest.java
index 38b0f4d..71f69d4 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/BasicTimeCheckerTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/BasicTimeCheckerTest.java
@@ -16,29 +16,26 @@
 
 package azkaban.trigger;
 
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.utils.Utils;
 import java.util.HashMap;
 import java.util.Map;
-
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadablePeriod;
-
 import org.junit.Test;
 
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import azkaban.utils.Utils;
-import azkaban.trigger.builtin.BasicTimeChecker;
-
 public class BasicTimeCheckerTest {
 
 
-  private Condition getCondition(BasicTimeChecker timeChecker){
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
+  private Condition getCondition(final BasicTimeChecker timeChecker) {
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
     checkers.put(timeChecker.getId(), timeChecker);
-    String expr = timeChecker.getId() + ".eval()";
+    final String expr = timeChecker.getId() + ".eval()";
 
     return new Condition(checkers, expr);
   }
@@ -48,21 +45,21 @@ public class BasicTimeCheckerTest {
 
     // get a new timechecker, start from now, repeat every minute. should
     // evaluate to false now, and true a minute later.
-    DateTime now = DateTime.now();
-    ReadablePeriod period = Utils.parsePeriodString("10s");
+    final DateTime now = DateTime.now();
+    final ReadablePeriod period = Utils.parsePeriodString("10s");
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             now.getZone(), true, true, period, null);
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
     assertFalse(cond.isMet());
 
     // sleep for 1 min
     try {
       Thread.sleep(10000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -74,7 +71,7 @@ public class BasicTimeCheckerTest {
     // sleep for 1 min
     try {
       Thread.sleep(10000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -88,106 +85,111 @@ public class BasicTimeCheckerTest {
   @Test
   public void testQuartzCurrentZone() {
 
-    DateTime now = DateTime.now();
-    String cronExpression = "0 0 0 31 12 ? 2050";
+    final DateTime now = DateTime.now();
+    final String cronExpression = "0 0 0 31 12 ? 2050";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             now.getZone(), true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
     // 2556086400000L represent for "2050-12-31T00:00:00.000-08:00"
 
-    DateTime year2050 = new DateTime(2050, 12, 31, 0 ,0 ,0 ,now.getZone());
+    final DateTime year2050 = new DateTime(2050, 12, 31, 0, 0, 0, now.getZone());
     assertTrue(cond.getNextCheckTime() == year2050.getMillis());
   }
 
   /**
-   * Test when PST-->PDT happens in 2020. -8:00 -> -7:00
-   * See details why confusion happens during this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+   * Test when PST-->PDT happens in 2020. -8:00 -> -7:00 See details why confusion happens during
+   * this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
    *
-   * This test demonstrates that if the cron is under UTC settings,
-   * When daylight saving change occurs, 2:30 will be changed to 3:30 at that day.
+   * This test demonstrates that if the cron is under UTC settings, When daylight saving change
+   * occurs, 2:30 will be changed to 3:30 at that day.
    */
   @Test
   public void testPSTtoPDTunderUTC() {
 
-    DateTime now = DateTime.now();
+    final DateTime now = DateTime.now();
 
     // 10:30 UTC == 2:30 PST
-    String cronExpression = "0 30 10 8 3 ? 2020";
+    final String cronExpression = "0 30 10 8 3 ? 2020";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             DateTimeZone.UTC, true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
-    DateTime spring2020UTC = new DateTime(2020, 3, 8, 10, 30, 0, DateTimeZone.UTC);
-    DateTime spring2020PDT = new DateTime(2020, 3, 8, 3, 30, 0, DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime spring2020UTC = new DateTime(2020, 3, 8, 10, 30, 0, DateTimeZone.UTC);
+    final DateTime spring2020PDT = new DateTime(2020, 3, 8, 3, 30, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
     assertTrue(cond.getNextCheckTime() == spring2020UTC.getMillis());
     assertTrue(cond.getNextCheckTime() == spring2020PDT.getMillis());
   }
 
   /**
-   * Test when PST-->PDT happens in 2020. -8:00 -> -7:00
-   * See details why confusion happens during this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+   * Test when PST-->PDT happens in 2020. -8:00 -> -7:00 See details why confusion happens during
+   * this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
    *
-   * This test demonstrates that 2:30 AM will not happen during the daylight saving day on Cron settings under PDT/PST.
-   * Since we let the cron triggered both at March 8th, and 9th, it will execute at March 9th.
+   * This test demonstrates that 2:30 AM will not happen during the daylight saving day on Cron
+   * settings under PDT/PST. Since we let the cron triggered both at March 8th, and 9th, it will
+   * execute at March 9th.
    */
   @Test
   public void testPSTtoPDTdst2() {
 
-    DateTime now = DateTime.now();
+    final DateTime now = DateTime.now();
 
-    String cronExpression = "0 30 2 8,9 3 ? 2020";
+    final String cronExpression = "0 30 2 8,9 3 ? 2020";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecker_1", now.getMillis(),
             DateTimeZone.forID("America/Los_Angeles"), true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
-    DateTime aTime = new DateTime(2020, 3, 9, 2, 30, 0, DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime aTime = new DateTime(2020, 3, 9, 2, 30, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
     assertTrue(cond.getNextCheckTime() == aTime.getMillis());
   }
 
   /**
-   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00
-   * See details why confusion happens during this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00 See details why confusion happens during
+   * this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
    *
-   * This test cronDayLightPacificWinter1 is in order to compare against the cronDayLightPacificWinter2.
+   * This test cronDayLightPacificWinter1 is in order to compare against the
+   * cronDayLightPacificWinter2.
    *
-   * In this Test, we let job run at 1:00 at Nov.1st, 2020. We know that we will have two 1:00 at that day.
-   * The test shows that the first 1:00 is skipped at that day.
-   * Schedule will still be executed once on that day.
+   * In this Test, we let job run at 1:00 at Nov.1st, 2020. We know that we will have two 1:00 at
+   * that day. The test shows that the first 1:00 is skipped at that day. Schedule will still be
+   * executed once on that day.
    */
   @Test
   public void testPDTtoPSTdst1() {
 
-    DateTime now = DateTime.now();
+    final DateTime now = DateTime.now();
 
     // 9:00 UTC == 1:00 PST (difference is 8 hours)
-    String cronExpression = "0 0 1 1,2 11 ? 2020";
+    final String cronExpression = "0 0 1 1,2 11 ? 2020";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             DateTimeZone.forID("America/Los_Angeles"), true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
-    DateTime winter2020 = new DateTime(2020, 11, 1, 9, 0, 0, DateTimeZone.UTC);
+    final DateTime winter2020 = new DateTime(2020, 11, 1, 9, 0, 0, DateTimeZone.UTC);
 
-    DateTime winter2020_2 = new DateTime(2020, 11, 1, 1, 0, 0, DateTimeZone.forID("America/Los_Angeles"));
-    DateTime winter2020_3 = new DateTime(2020, 11, 1, 2, 0, 0, DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime winter2020_2 = new DateTime(2020, 11, 1, 1, 0, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime winter2020_3 = new DateTime(2020, 11, 1, 2, 0, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
     assertTrue(cond.getNextCheckTime() == winter2020.getMillis());
 
-
     // Both 1 and 2 o'clock can not pass the test. Based on milliseconds we got,
     // winter2020_2.getMillis() == 11/1/2020, 1:00:00 AM GMT-7:00 DST
     // winter2020_3.getMillis() == 11/1/2020, 2:00:00 AM GMT-8:00
@@ -198,32 +200,34 @@ public class BasicTimeCheckerTest {
 
 
   /**
-   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00
-   * See details why confusion happens during this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00 See details why confusion happens during
+   * this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
    *
-   * This test cronDayLightPacificWinter2 is in order to be compared against the cronDayLightPacificWinter1.
+   * This test cronDayLightPacificWinter2 is in order to be compared against the
+   * cronDayLightPacificWinter1.
    *
-   * In this Test, we let job run at 0:59 at Nov.1st, 2020. it shows that it is 7:59 UTC
-   * The test shows 7:59 UTC jump to 9:00 UTC.
+   * In this Test, we let job run at 0:59 at Nov.1st, 2020. it shows that it is 7:59 UTC The test
+   * shows 7:59 UTC jump to 9:00 UTC.
    */
   @Test
   public void testPDTtoPSTdst2() {
 
-    DateTime now = DateTime.now();
+    final DateTime now = DateTime.now();
 
     // 7:59 UTC == 0:59 PDT (difference is 7 hours)
-    String cronExpression = "0 59 0 1,2 11 ? 2020";
+    final String cronExpression = "0 59 0 1,2 11 ? 2020";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             DateTimeZone.forID("America/Los_Angeles"), true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
     // 7:59 UTC == 0:59 PDT (difference is 7 hours)
-    DateTime winter2020 = new DateTime(2020, 11, 1, 7, 59, 0, DateTimeZone.UTC);
-    DateTime winter2020_2 = new DateTime(2020, 11, 1, 0, 59, 0, DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime winter2020 = new DateTime(2020, 11, 1, 7, 59, 0, DateTimeZone.UTC);
+    final DateTime winter2020_2 = new DateTime(2020, 11, 1, 0, 59, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
 
     // Local time remains the same.
     assertTrue(cond.getNextCheckTime() == winter2020.getMillis());
@@ -231,36 +235,38 @@ public class BasicTimeCheckerTest {
   }
 
 
-
   /**
-   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00
-   * See details why confusion happens during this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
+   * Test when PDT-->PST happens in 2020. -7:00 -> -8:00 See details why confusion happens during
+   * this change: https://en.wikipedia.org/wiki/Pacific_Time_Zone
    *
    * This test is a supplement to cronDayLightPacificWinter1.
    *
    * Still, we let job run at 1:30 at Nov.1st, 2020. We know that we will have two 1:30 at that day.
-   * The test shows the 1:30 at that day will be based on PST, not PDT. It means that the first 1:30 is skipped at that day.
+   * The test shows the 1:30 at that day will be based on PST, not PDT. It means that the first 1:30
+   * is skipped at that day.
    */
   @Test
   public void testPDTtoPSTdst3() {
-    
-    DateTime now = DateTime.now();
+
+    final DateTime now = DateTime.now();
 
     // 9:30 UTC == 1:30 PST (difference is 8 hours)
-    String cronExpression = "0 30 1 1,2 11 ? 2020";
+    final String cronExpression = "0 30 1 1,2 11 ? 2020";
 
-    BasicTimeChecker timeChecker =
+    final BasicTimeChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecket_1", now.getMillis(),
             DateTimeZone.forID("America/Los_Angeles"), true, true, null, cronExpression);
     System.out.println("getNextCheckTime = " + timeChecker.getNextCheckTime());
 
-    Condition cond = getCondition(timeChecker);
+    final Condition cond = getCondition(timeChecker);
 
     // 9:30 UTC == 1:30 PST (difference is 8 hours)
-    DateTime winter2020 = new DateTime(2020, 11, 1, 9, 30, 0, DateTimeZone.UTC);
+    final DateTime winter2020 = new DateTime(2020, 11, 1, 9, 30, 0, DateTimeZone.UTC);
 
-    DateTime winter2020_2 = new DateTime(2020, 11, 1, 1, 30, 0, DateTimeZone.forID("America/Los_Angeles"));
-    DateTime winter2020_3 = new DateTime(2020, 11, 1, 2, 30, 0, DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime winter2020_2 = new DateTime(2020, 11, 1, 1, 30, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
+    final DateTime winter2020_3 = new DateTime(2020, 11, 1, 2, 30, 0,
+        DateTimeZone.forID("America/Los_Angeles"));
     assertTrue(cond.getNextCheckTime() == winter2020.getMillis());
 
     // Both 1:30 and 2:30 can not pass the test.
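
The javadoc in the hunks above hinges on two Joda-Time behaviours around the America/Los_Angeles DST transitions in 2020: the spring-forward gap (2:00 to 2:59 local time on March 8 does not exist) and the fall-back overlap (1:00 to 1:59 on November 1 occurs twice and resolves to the earlier, -07:00 instant, as the inline comments note). A minimal sketch that demonstrates both directly:

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.IllegalInstantException;

public class DstTransitionSketch {

  public static void main(String[] args) {
    final DateTimeZone la = DateTimeZone.forID("America/Los_Angeles");

    // Spring forward, 2020-03-08: local times between 2:00 and 2:59 do not exist.
    try {
      new DateTime(2020, 3, 8, 2, 30, 0, la);
    } catch (final IllegalInstantException e) {
      System.out.println("2:30 on 2020-03-08 falls in the DST gap: " + e.getMessage());
    }

    // Fall back, 2020-11-01: 1:30 local time occurs twice; Joda-Time resolves the
    // ambiguity to the earlier instant (still PDT, -07:00), matching the test comments.
    final DateTime ambiguous = new DateTime(2020, 11, 1, 1, 30, 0, la);
    System.out.println(ambiguous); // 2020-11-01T01:30:00.000-07:00
  }
}
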
diff --git a/azkaban-common/src/test/java/azkaban/trigger/ConditionTest.java b/azkaban-common/src/test/java/azkaban/trigger/ConditionTest.java
index cf5a1fa..fcd1b80 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/ConditionTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/ConditionTest.java
@@ -16,47 +16,44 @@
 
 package azkaban.trigger;
 
-import java.io.File;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.joda.time.DateTime;
-
-import org.junit.Test;
-import org.junit.Ignore;
-
-import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 import azkaban.trigger.builtin.BasicTimeChecker;
 import azkaban.utils.JSONUtils;
 import azkaban.utils.Props;
 import azkaban.utils.Utils;
+import java.io.File;
+import java.util.HashMap;
+import java.util.Map;
+import org.joda.time.DateTime;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class ConditionTest {
 
   @Test
   public void conditionTest() {
 
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
 
-    ThresholdChecker fake1 = new ThresholdChecker("thresholdchecker1", 10);
-    ThresholdChecker fake2 = new ThresholdChecker("thresholdchecker2", 20);
+    final ThresholdChecker fake1 = new ThresholdChecker("thresholdchecker1", 10);
+    final ThresholdChecker fake2 = new ThresholdChecker("thresholdchecker2", 20);
     ThresholdChecker.setVal(15);
     checkers.put(fake1.getId(), fake1);
     checkers.put(fake2.getId(), fake2);
 
-    String expr1 =
+    final String expr1 =
         "( " + fake1.getId() + ".eval()" + " && " + fake2.getId() + ".eval()"
             + " )" + " || " + "( " + fake1.getId() + ".eval()" + " && " + "!"
             + fake2.getId() + ".eval()" + " )";
-    String expr2 =
+    final String expr2 =
         "( " + fake1.getId() + ".eval()" + " && " + fake2.getId() + ".eval()"
             + " )" + " || " + "( " + fake1.getId() + ".eval()" + " && "
             + fake2.getId() + ".eval()" + " )";
 
-    Condition cond = new Condition(checkers, expr1);
+    final Condition cond = new Condition(checkers, expr1);
 
     System.out.println("Setting expression " + expr1);
     assertTrue(cond.isMet());
@@ -66,47 +63,48 @@ public class ConditionTest {
 
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void jsonConversionTest() throws Exception {
 
-    CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
+    final CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
     checkerTypeLoader.init(new Props());
     Condition.setCheckerLoader(checkerTypeLoader);
 
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
 
     // get a new timechecker, start from now, repeat every minute. should
     // evaluate to false now, and true a minute later.
-    DateTime now = DateTime.now();
-    String period = "6s";
+    final DateTime now = DateTime.now();
+    final String period = "6s";
 
     // BasicTimeChecker timeChecker = new BasicTimeChecker(now, true, true,
     // period);
-    ConditionChecker timeChecker =
+    final ConditionChecker timeChecker =
         new BasicTimeChecker("BasicTimeChecker_1", now.getMillis(),
             now.getZone(), true, true, Utils.parsePeriodString(period), null);
     System.out.println("checker id is " + timeChecker.getId());
 
     checkers.put(timeChecker.getId(), timeChecker);
-    String expr = timeChecker.getId() + ".eval()";
+    final String expr = timeChecker.getId() + ".eval()";
 
-    Condition cond = new Condition(checkers, expr);
+    final Condition cond = new Condition(checkers, expr);
 
-    File temp = File.createTempFile("temptest", "temptest");
+    final File temp = File.createTempFile("temptest", "temptest");
     temp.deleteOnExit();
-    Object obj = cond.toJson();
+    final Object obj = cond.toJson();
     JSONUtils.toJSON(obj, temp);
 
-    Condition cond2 = Condition.fromJson(JSONUtils.parseJSONFromFile(temp));
+    final Condition cond2 = Condition.fromJson(JSONUtils.parseJSONFromFile(temp));
 
-    Map<String, ConditionChecker> checkers2 = cond2.getCheckers();
+    final Map<String, ConditionChecker> checkers2 = cond2.getCheckers();
 
     assertTrue(cond.getExpression().equals(cond2.getExpression()));
     System.out.println("cond1: " + cond.getExpression());
     System.out.println("cond2: " + cond2.getExpression());
     assertTrue(checkers2.size() == 1);
-    ConditionChecker checker2 = checkers2.get(timeChecker.getId());
+    final ConditionChecker checker2 = checkers2.get(timeChecker.getId());
     // assertTrue(checker2.getId().equals(timeChecker.getId()));
     System.out.println("checker1: " + timeChecker.getId());
     System.out.println("checker2: " + checker2.getId());
diff --git a/azkaban-common/src/test/java/azkaban/trigger/DummyTriggerAction.java b/azkaban-common/src/test/java/azkaban/trigger/DummyTriggerAction.java
index c5a26c4..4d234ae 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/DummyTriggerAction.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/DummyTriggerAction.java
@@ -18,15 +18,13 @@ package azkaban.trigger;
 
 import java.util.Map;
 
-import azkaban.trigger.TriggerAction;
-
 public class DummyTriggerAction implements TriggerAction {
 
   public static final String type = "DummyAction";
 
-  private String message;
+  private final String message;
 
-  public DummyTriggerAction(String message) {
+  public DummyTriggerAction(final String message) {
     this.message = message;
   }
 
@@ -36,7 +34,7 @@ public class DummyTriggerAction implements TriggerAction {
   }
 
   @Override
-  public TriggerAction fromJson(Object obj) {
+  public TriggerAction fromJson(final Object obj) {
     return null;
   }
 
@@ -48,7 +46,7 @@ public class DummyTriggerAction implements TriggerAction {
   @Override
   public void doAction() {
     System.out.println(getType() + " invoked.");
-    System.out.println(message);
+    System.out.println(this.message);
   }
 
   @Override
@@ -62,6 +60,6 @@ public class DummyTriggerAction implements TriggerAction {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 }
diff --git a/azkaban-common/src/test/java/azkaban/trigger/ExecuteFlowActionTest.java b/azkaban-common/src/test/java/azkaban/trigger/ExecuteFlowActionTest.java
index 0e8b2f9..3c86dc5 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/ExecuteFlowActionTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/ExecuteFlowActionTest.java
@@ -16,36 +16,35 @@
 
 package azkaban.trigger;
 
-import java.util.ArrayList;
-import java.util.List;
-
-import org.junit.Ignore;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertTrue;
 
 import azkaban.executor.ExecutionOptions;
 import azkaban.trigger.builtin.ExecuteFlowAction;
 import azkaban.utils.Props;
+import java.util.ArrayList;
+import java.util.List;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class ExecuteFlowActionTest {
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void jsonConversionTest() throws Exception {
-    ActionTypeLoader loader = new ActionTypeLoader();
+    final ActionTypeLoader loader = new ActionTypeLoader();
     loader.init(new Props());
 
-    ExecutionOptions options = new ExecutionOptions();
-    List<Object> disabledJobs = new ArrayList<Object>();
+    final ExecutionOptions options = new ExecutionOptions();
+    final List<Object> disabledJobs = new ArrayList<>();
     options.setDisabledJobs(disabledJobs);
 
-    ExecuteFlowAction executeFlowAction =
+    final ExecuteFlowAction executeFlowAction =
         new ExecuteFlowAction("ExecuteFlowAction", 1, "testproject",
             "testflow", "azkaban", options, null);
 
-    Object obj = executeFlowAction.toJson();
+    final Object obj = executeFlowAction.toJson();
 
-    ExecuteFlowAction action =
+    final ExecuteFlowAction action =
         (ExecuteFlowAction) loader.createActionFromJson(ExecuteFlowAction.type,
             obj);
     assertTrue(executeFlowAction.getProjectId() == action.getProjectId());
diff --git a/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerImplTest.java b/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerImplTest.java
index 8a982f5..498edd3 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerImplTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerImplTest.java
@@ -16,6 +16,8 @@
 
 package azkaban.trigger;
 
+import static org.junit.Assert.assertTrue;
+
 import azkaban.database.AzkabanConnectionPoolTest;
 import azkaban.database.AzkabanDataSource;
 import azkaban.database.AzkabanDatabaseSetup;
@@ -39,33 +41,31 @@ import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
 
 public class JdbcTriggerImplTest {
 
+  public static AzkabanDataSource dataSource = new AzkabanConnectionPoolTest.EmbeddedH2BasicDataSource();
   TriggerLoader loader;
   DatabaseOperator dbOperator;
-  public static AzkabanDataSource dataSource = new AzkabanConnectionPoolTest.EmbeddedH2BasicDataSource();
 
   @BeforeClass
   public static void prepare() throws Exception {
-    Props props = new Props();
+    final Props props = new Props();
 
-    String sqlScriptsDir = new File("../azkaban-db/src/main/sql/").getCanonicalPath();
+    final String sqlScriptsDir = new File("../azkaban-db/src/main/sql/").getCanonicalPath();
     props.put("database.sql.scripts.dir", sqlScriptsDir);
 
-    AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(dataSource, props);
+    final AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(dataSource, props);
     setup.loadTableInfo();
     setup.updateDatabase(true, false);
 
-    CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
-    ActionTypeLoader actionTypeLoader = new ActionTypeLoader();
+    final CheckerTypeLoader checkerTypeLoader = new CheckerTypeLoader();
+    final ActionTypeLoader actionTypeLoader = new ActionTypeLoader();
 
     try {
       checkerTypeLoader.init(null);
       actionTypeLoader.init(null);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new TriggerManagerException(e);
     }
 
@@ -79,41 +79,41 @@ public class JdbcTriggerImplTest {
   @Before
   public void setUp() {
 
-    dbOperator = new DatabaseOperatorImpl(new QueryRunner(dataSource));
-    loader = new JdbcTriggerImpl(dbOperator);
+    this.dbOperator = new DatabaseOperatorImpl(new QueryRunner(dataSource));
+    this.loader = new JdbcTriggerImpl(this.dbOperator);
   }
 
   @Test
   public void testRemoveTriggers() throws Exception {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
-    Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
-    loader.addTrigger(t1);
-    loader.addTrigger(t2);
-    List<Trigger> ts = loader.loadTriggers();
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+    this.loader.addTrigger(t1);
+    this.loader.addTrigger(t2);
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 2);
-    loader.removeTrigger(t2);
-    ts = loader.loadTriggers();
+    this.loader.removeTrigger(t2);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 1);
     assertTrue(ts.get(0).getTriggerId() == t1.getTriggerId());
   }
 
   @Test
   public void testAddTrigger() throws Exception {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
-    Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
-    loader.addTrigger(t1);
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+    this.loader.addTrigger(t1);
 
-    List<Trigger> ts = loader.loadTriggers();
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 1);
 
-    Trigger t3 = ts.get(0);
+    final Trigger t3 = ts.get(0);
     assertTrue(t3.getSource().equals("source1"));
 
-    loader.addTrigger(t2);
-    ts = loader.loadTriggers();
+    this.loader.addTrigger(t2);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 2);
 
-    for (Trigger t : ts) {
+    for (final Trigger t : ts) {
       if (t.getTriggerId() == t2.getTriggerId()) {
         t.getSource().equals(t2.getSource());
       }
@@ -122,35 +122,35 @@ public class JdbcTriggerImplTest {
 
   @Test
   public void testUpdateTrigger() throws Exception {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
     t1.setResetOnExpire(true);
-    loader.addTrigger(t1);
-    List<Trigger> ts = loader.loadTriggers();
+    this.loader.addTrigger(t1);
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.get(0).isResetOnExpire() == true);
     t1.setResetOnExpire(false);
-    loader.updateTrigger(t1);
-    ts = loader.loadTriggers();
+    this.loader.updateTrigger(t1);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.get(0).isResetOnExpire() == false);
   }
 
-  private Trigger createTrigger(String projName, String flowName, String source) {
-    DateTime now = DateTime.now();
-    ConditionChecker checker1 =
+  private Trigger createTrigger(final String projName, final String flowName, final String source) {
+    final DateTime now = DateTime.now();
+    final ConditionChecker checker1 =
         new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(),
             true, true, Utils.parsePeriodString("1h"), null);
-    Map<String, ConditionChecker> checkers1 =
-        new HashMap<String, ConditionChecker>();
+    final Map<String, ConditionChecker> checkers1 =
+        new HashMap<>();
     checkers1.put(checker1.getId(), checker1);
-    String expr1 = checker1.getId() + ".eval()";
-    Condition triggerCond = new Condition(checkers1, expr1);
-    Condition expireCond = new Condition(checkers1, expr1);
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    TriggerAction action =
+    final String expr1 = checker1.getId() + ".eval()";
+    final Condition triggerCond = new Condition(checkers1, expr1);
+    final Condition expireCond = new Condition(checkers1, expr1);
+    final List<TriggerAction> actions = new ArrayList<>();
+    final TriggerAction action =
         new ExecuteFlowAction("executeAction", 1, projName, flowName,
             "azkaban", new ExecutionOptions(), null);
     actions.add(action);
 
-    Trigger t = new Trigger.TriggerBuilder("azkaban",
+    final Trigger t = new Trigger.TriggerBuilder("azkaban",
         source,
         triggerCond,
         expireCond,
@@ -163,9 +163,9 @@ public class JdbcTriggerImplTest {
   @After
   public void clearDB() {
     try {
-      dbOperator.update("DELETE FROM triggers");
+      this.dbOperator.update("DELETE FROM triggers");
 
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       return;
     }
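
JdbcTriggerImplTest above drives the trigger table through a commons-dbutils QueryRunner over an embedded H2 data source. A standalone sketch of that pattern against an in-memory H2 database follows; the table definition and the org.h2.jdbcx.JdbcDataSource setup are assumptions for this sketch and do not reflect the Azkaban schema, while the CountHandler mirrors the handler shown in the tests.

import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.commons.dbutils.QueryRunner;
import org.apache.commons.dbutils.ResultSetHandler;
import org.h2.jdbcx.JdbcDataSource;

public class QueryRunnerSketch {

  // Same shape as the CountHandler in the tests: count the rows in the result set.
  static class CountHandler implements ResultSetHandler<Integer> {

    @Override
    public Integer handle(final ResultSet rs) throws SQLException {
      int val = 0;
      while (rs.next()) {
        val++;
      }
      return val;
    }
  }

  public static void main(String[] args) throws Exception {
    // In-memory H2 database that lives for the duration of the JVM.
    final JdbcDataSource dataSource = new JdbcDataSource();
    dataSource.setURL("jdbc:h2:mem:sketch;DB_CLOSE_DELAY=-1");

    final QueryRunner runner = new QueryRunner(dataSource);
    runner.update("CREATE TABLE triggers (trigger_id INT PRIMARY KEY, trigger_source VARCHAR(128))");
    runner.update("INSERT INTO triggers VALUES (?, ?)", 1, "source1");

    System.out.println(runner.query("SELECT trigger_id FROM triggers", new CountHandler())); // 1

    runner.update("DELETE FROM triggers");
  }
}
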
diff --git a/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerLoaderTest.java b/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerLoaderTest.java
index 4599124..56f5160 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerLoaderTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/JdbcTriggerLoaderTest.java
@@ -16,6 +16,14 @@
 
 package azkaban.trigger;
 
+import static org.junit.Assert.assertTrue;
+
+import azkaban.database.DataSourceUtils;
+import azkaban.executor.ExecutionOptions;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -23,32 +31,18 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import javax.sql.DataSource;
-
 import org.apache.commons.dbutils.DbUtils;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
-
 import org.joda.time.DateTime;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
-import azkaban.database.DataSourceUtils;
-import azkaban.executor.ExecutionOptions;
-import azkaban.trigger.builtin.BasicTimeChecker;
-import azkaban.trigger.builtin.ExecuteFlowAction;
-import azkaban.utils.Props;
-import azkaban.utils.Utils;
-
 public class JdbcTriggerLoaderTest {
 
-  private static boolean testDBExists = false;
   // @TODO remove this and turn into local host.
   private static final String host = "localhost";
   private static final int port = 3306;
@@ -56,14 +50,14 @@ public class JdbcTriggerLoaderTest {
   private static final String user = "azkaban";
   private static final String password = "azkaban";
   private static final int numConnections = 10;
-
+  private static boolean testDBExists = false;
   private TriggerLoader loader;
   private CheckerTypeLoader checkerLoader;
   private ActionTypeLoader actionLoader;
 
   @Before
   public void setup() throws TriggerException {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("database.type", "mysql");
 
     props.put("mysql.host", host);
@@ -73,23 +67,23 @@ public class JdbcTriggerLoaderTest {
     props.put("mysql.password", password);
     props.put("mysql.numconnections", numConnections);
 
-    loader = new JdbcTriggerLoader(props);
-    checkerLoader = new CheckerTypeLoader();
-    checkerLoader.init(new Props());
-    checkerLoader.registerCheckerType(BasicTimeChecker.type,
+    this.loader = new JdbcTriggerLoader(props);
+    this.checkerLoader = new CheckerTypeLoader();
+    this.checkerLoader.init(new Props());
+    this.checkerLoader.registerCheckerType(BasicTimeChecker.type,
         BasicTimeChecker.class);
-    Condition.setCheckerLoader(checkerLoader);
-    actionLoader = new ActionTypeLoader();
-    actionLoader.init(new Props());
+    Condition.setCheckerLoader(this.checkerLoader);
+    this.actionLoader = new ActionTypeLoader();
+    this.actionLoader.init(new Props());
 
-    actionLoader.registerActionType(ExecuteFlowAction.type,
+    this.actionLoader.registerActionType(ExecuteFlowAction.type,
         ExecuteFlowAction.class);
-    Trigger.setActionTypeLoader(actionLoader);
+    Trigger.setActionTypeLoader(this.actionLoader);
     setupDB();
   }
 
   public void setupDB() {
-    DataSource dataSource =
+    final DataSource dataSource =
         DataSourceUtils.getMySQLDataSource(host, port, database, user,
             password, numConnections);
     testDBExists = true;
@@ -97,18 +91,18 @@ public class JdbcTriggerLoaderTest {
     Connection connection = null;
     try {
       connection = dataSource.getConnection();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
       return;
     }
 
-    CountHandler countHandler = new CountHandler();
-    QueryRunner runner = new QueryRunner();
+    final CountHandler countHandler = new CountHandler();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.query(connection, "SELECT COUNT(1) FROM triggers", countHandler);
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -126,24 +120,24 @@ public class JdbcTriggerLoaderTest {
       return;
     }
 
-    DataSource dataSource =
+    final DataSource dataSource =
         DataSourceUtils.getMySQLDataSource(host, port, database, user,
             password, numConnections);
     Connection connection = null;
     try {
       connection = dataSource.getConnection();
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
       return;
     }
 
-    QueryRunner runner = new QueryRunner();
+    final QueryRunner runner = new QueryRunner();
     try {
       runner.update(connection, "DELETE FROM triggers");
 
-    } catch (SQLException e) {
+    } catch (final SQLException e) {
       e.printStackTrace();
       testDBExists = false;
       DbUtils.closeQuietly(connection);
@@ -153,73 +147,76 @@ public class JdbcTriggerLoaderTest {
     DbUtils.closeQuietly(connection);
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void addTriggerTest() throws TriggerLoaderException {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
-    Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
-    loader.addTrigger(t1);
-    List<Trigger> ts = loader.loadTriggers();
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+    this.loader.addTrigger(t1);
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 1);
 
-    Trigger t3 = ts.get(0);
+    final Trigger t3 = ts.get(0);
     assertTrue(t3.getSource().equals("source1"));
 
-    loader.addTrigger(t2);
-    ts = loader.loadTriggers();
+    this.loader.addTrigger(t2);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 2);
 
-    for (Trigger t : ts) {
+    for (final Trigger t : ts) {
       if (t.getTriggerId() == t2.getTriggerId()) {
         t.getSource().equals(t2.getSource());
       }
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void removeTriggerTest() throws TriggerLoaderException {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
-    Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
-    loader.addTrigger(t1);
-    loader.addTrigger(t2);
-    List<Trigger> ts = loader.loadTriggers();
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t2 = createTrigger("testProj2", "testFlow2", "source2");
+    this.loader.addTrigger(t1);
+    this.loader.addTrigger(t2);
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 2);
-    loader.removeTrigger(t2);
-    ts = loader.loadTriggers();
+    this.loader.removeTrigger(t2);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.size() == 1);
     assertTrue(ts.get(0).getTriggerId() == t1.getTriggerId());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void updateTriggerTest() throws TriggerLoaderException {
-    Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
+    final Trigger t1 = createTrigger("testProj1", "testFlow1", "source1");
     t1.setResetOnExpire(true);
-    loader.addTrigger(t1);
-    List<Trigger> ts = loader.loadTriggers();
+    this.loader.addTrigger(t1);
+    List<Trigger> ts = this.loader.loadTriggers();
     assertTrue(ts.get(0).isResetOnExpire() == true);
     t1.setResetOnExpire(false);
-    loader.updateTrigger(t1);
-    ts = loader.loadTriggers();
+    this.loader.updateTrigger(t1);
+    ts = this.loader.loadTriggers();
     assertTrue(ts.get(0).isResetOnExpire() == false);
   }
 
-  private Trigger createTrigger(String projName, String flowName, String source) {
-    DateTime now = DateTime.now();
-    ConditionChecker checker1 =
+  private Trigger createTrigger(final String projName, final String flowName, final String source) {
+    final DateTime now = DateTime.now();
+    final ConditionChecker checker1 =
         new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(),
             true, true, Utils.parsePeriodString("1h"), null);
-    Map<String, ConditionChecker> checkers1 =
-        new HashMap<String, ConditionChecker>();
+    final Map<String, ConditionChecker> checkers1 =
+        new HashMap<>();
     checkers1.put(checker1.getId(), checker1);
-    String expr1 = checker1.getId() + ".eval()";
-    Condition triggerCond = new Condition(checkers1, expr1);
-    Condition expireCond = new Condition(checkers1, expr1);
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    TriggerAction action =
+    final String expr1 = checker1.getId() + ".eval()";
+    final Condition triggerCond = new Condition(checkers1, expr1);
+    final Condition expireCond = new Condition(checkers1, expr1);
+    final List<TriggerAction> actions = new ArrayList<>();
+    final TriggerAction action =
         new ExecuteFlowAction("executeAction", 1, projName, flowName,
             "azkaban", new ExecutionOptions(), null);
     actions.add(action);
 
-    Trigger t = new Trigger.TriggerBuilder("azkaban",
+    final Trigger t = new Trigger.TriggerBuilder("azkaban",
         source,
         triggerCond,
         expireCond,
@@ -229,8 +226,9 @@ public class JdbcTriggerLoaderTest {
   }
 
   public static class CountHandler implements ResultSetHandler<Integer> {
+
     @Override
-    public Integer handle(ResultSet rs) throws SQLException {
+    public Integer handle(final ResultSet rs) throws SQLException {
       int val = 0;
       while (rs.next()) {
         val++;
diff --git a/azkaban-common/src/test/java/azkaban/trigger/MockTriggerLoader.java b/azkaban-common/src/test/java/azkaban/trigger/MockTriggerLoader.java
index 3b91493..e9d7047 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/MockTriggerLoader.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/MockTriggerLoader.java
@@ -23,44 +23,44 @@ import java.util.Map;
 
 public class MockTriggerLoader implements TriggerLoader {
 
-  Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
+  Map<Integer, Trigger> triggers = new HashMap<>();
   int triggerCount = 0;
 
   @Override
-  public synchronized void addTrigger(Trigger t) throws TriggerLoaderException {
-    t.setTriggerId(triggerCount);
+  public synchronized void addTrigger(final Trigger t) throws TriggerLoaderException {
+    t.setTriggerId(this.triggerCount);
     t.setLastModifyTime(System.currentTimeMillis());
-    triggers.put(t.getTriggerId(), t);
-    triggerCount++;
+    this.triggers.put(t.getTriggerId(), t);
+    this.triggerCount++;
   }
 
   @Override
-  public synchronized void removeTrigger(Trigger s)
+  public synchronized void removeTrigger(final Trigger s)
       throws TriggerLoaderException {
-    triggers.remove(s.getTriggerId());
+    this.triggers.remove(s.getTriggerId());
   }
 
   @Override
-  public synchronized void updateTrigger(Trigger t)
+  public synchronized void updateTrigger(final Trigger t)
       throws TriggerLoaderException {
     t.setLastModifyTime(System.currentTimeMillis());
-    triggers.put(t.getTriggerId(), t);
+    this.triggers.put(t.getTriggerId(), t);
   }
 
   @Override
   public synchronized List<Trigger> loadTriggers()
       throws TriggerLoaderException {
-    return new ArrayList<Trigger>(triggers.values());
+    return new ArrayList<>(this.triggers.values());
   }
 
   @Override
-  public synchronized Trigger loadTrigger(int triggerId)
+  public synchronized Trigger loadTrigger(final int triggerId)
       throws TriggerLoaderException {
-    return triggers.get(triggerId);
+    return this.triggers.get(triggerId);
   }
 
   @Override
-  public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+  public List<Trigger> getUpdatedTriggers(final long lastUpdateTime)
       throws TriggerLoaderException {
     // TODO Auto-generated method stub
     return null;
diff --git a/azkaban-common/src/test/java/azkaban/trigger/ThresholdChecker.java b/azkaban-common/src/test/java/azkaban/trigger/ThresholdChecker.java
index 279c329..dec5fe5 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/ThresholdChecker.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/ThresholdChecker.java
@@ -18,50 +18,44 @@ package azkaban.trigger;
 
 import java.util.Map;
 
-import azkaban.trigger.ConditionChecker;
-
 public class ThresholdChecker implements ConditionChecker {
 
-  private int threshold = -1;
-
-  private static int curVal = -1;
-
   public static final String type = "ThresholdChecker";
-
-  private String id;
-
+  private static int curVal = -1;
+  private final String id;
+  private int threshold = -1;
   private boolean checkerMet = false;
   private boolean checkerReset = false;
 
-  public ThresholdChecker(String id, int threshold) {
+  public ThresholdChecker(final String id, final int threshold) {
     this.id = id;
     this.threshold = threshold;
   }
 
-  public synchronized static void setVal(int val) {
+  public synchronized static void setVal(final int val) {
     curVal = val;
   }
 
   @Override
   public Boolean eval() {
-    if (curVal > threshold) {
-      checkerMet = true;
+    if (curVal > this.threshold) {
+      this.checkerMet = true;
     }
-    return checkerMet;
+    return this.checkerMet;
   }
 
   public boolean isCheckerMet() {
-    return checkerMet;
+    return this.checkerMet;
   }
 
   @Override
   public void reset() {
-    checkerMet = false;
-    checkerReset = true;
+    this.checkerMet = false;
+    this.checkerReset = true;
   }
 
   public boolean isCheckerReset() {
-    return checkerReset;
+    return this.checkerReset;
   }
 
   /*
@@ -71,7 +65,7 @@ public class ThresholdChecker implements ConditionChecker {
    */
   @Override
   public String getId() {
-    return id;
+    return this.id;
   }
 
   @Override
@@ -80,7 +74,7 @@ public class ThresholdChecker implements ConditionChecker {
   }
 
   @Override
-  public ConditionChecker fromJson(Object obj) {
+  public ConditionChecker fromJson(final Object obj) {
     return null;
   }
 
@@ -103,7 +97,7 @@ public class ThresholdChecker implements ConditionChecker {
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
     // TODO Auto-generated method stub
 
   }
diff --git a/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerDeadlockTest.java b/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerDeadlockTest.java
index b7303eb..6ec2cd7 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerDeadlockTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerDeadlockTest.java
@@ -17,23 +17,21 @@
 package azkaban.trigger;
 
 import azkaban.executor.AlerterHolder;
+import azkaban.executor.ExecutorLoader;
+import azkaban.executor.ExecutorManager;
+import azkaban.executor.ExecutorManagerException;
+import azkaban.executor.MockExecutorLoader;
+import azkaban.trigger.builtin.CreateTriggerAction;
+import azkaban.utils.Props;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import azkaban.executor.ExecutorLoader;
-import azkaban.executor.ExecutorManager;
-import azkaban.executor.ExecutorManagerException;
-import azkaban.executor.MockExecutorLoader;
-import azkaban.trigger.builtin.CreateTriggerAction;
-import azkaban.utils.Props;
-
 public class TriggerManagerDeadlockTest {
 
   TriggerLoader loader;
@@ -42,13 +40,14 @@ public class TriggerManagerDeadlockTest {
 
   @Before
   public void setup() throws ExecutorManagerException, TriggerManagerException {
-    loader = new MockTriggerLoader();
-    Props props = new Props();
+    this.loader = new MockTriggerLoader();
+    final Props props = new Props();
     props.put("trigger.scan.interval", 1000);
     props.put("executor.port", 12321);
-    execLoader = new MockExecutorLoader();
-    ExecutorManager executorManager = new ExecutorManager(props, execLoader, new AlerterHolder(props));
-    triggerManager = new TriggerManager(props, loader, executorManager);
+    this.execLoader = new MockExecutorLoader();
+    final ExecutorManager executorManager = new ExecutorManager(props, this.execLoader,
+        new AlerterHolder(props));
+    this.triggerManager = new TriggerManager(props, this.loader, executorManager);
   }
 
   @After
@@ -57,24 +56,88 @@ public class TriggerManagerDeadlockTest {
   }
 
   // TODO kunkun-tang: This test has problems. Will fix
-  @Ignore @Test
+  @Ignore
+  @Test
   public void deadlockTest() throws TriggerLoaderException,
       TriggerManagerException {
     // this should well saturate it
     for (int i = 0; i < 1000; i++) {
-      Trigger t = createSelfRegenTrigger();
-      loader.addTrigger(t);
+      final Trigger t = createSelfRegenTrigger();
+      this.loader.addTrigger(t);
     }
     // keep going and add more
     for (int i = 0; i < 10000; i++) {
-      Trigger d = createDummyTrigger();
-      triggerManager.insertTrigger(d);
-      triggerManager.removeTrigger(d);
+      final Trigger d = createDummyTrigger();
+      this.triggerManager.insertTrigger(d);
+      this.triggerManager.removeTrigger(d);
     }
 
     System.out.println("No dead lock.");
   }
 
+  private Trigger createSelfRegenTrigger() {
+    final ConditionChecker alwaysOnChecker =
+        new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
+    final String triggerExpr = alwaysOnChecker.getId() + ".eval()";
+    final Map<String, ConditionChecker> triggerCheckers =
+        new HashMap<>();
+    triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
+    final Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
+
+    final TriggerAction triggerAct =
+        new CreateTriggerAction("dummyTrigger", createDummyTrigger());
+    final List<TriggerAction> actions = new ArrayList<>();
+    actions.add(triggerAct);
+
+    final ConditionChecker alwaysOffChecker =
+        new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
+    final String expireExpr = alwaysOffChecker.getId() + ".eval()";
+    final Map<String, ConditionChecker> expireCheckers =
+        new HashMap<>();
+    expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
+    final Condition expireCond = new Condition(expireCheckers, expireExpr);
+
+    final Trigger t =
+        new Trigger.TriggerBuilder("azkaban",
+            "azkabanTest",
+            triggerCond,
+            expireCond,
+            actions).build();
+
+    return t;
+  }
+
+  private Trigger createDummyTrigger() {
+    final ConditionChecker alwaysOnChecker =
+        new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
+    final String triggerExpr = alwaysOnChecker.getId() + ".eval()";
+    final Map<String, ConditionChecker> triggerCheckers =
+        new HashMap<>();
+    triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
+    final Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
+
+    final TriggerAction triggerAct = new DummyTriggerAction("howdy!");
+    final List<TriggerAction> actions = new ArrayList<>();
+    actions.add(triggerAct);
+
+    final ConditionChecker alwaysOffChecker =
+        new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
+    final String expireExpr = alwaysOffChecker.getId() + ".eval()";
+    final Map<String, ConditionChecker> expireCheckers =
+        new HashMap<>();
+    expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
+    final Condition expireCond = new Condition(expireCheckers, expireExpr);
+
+    final Trigger t =
+        new Trigger.TriggerBuilder("azkaban",
+            "azkabanTest",
+            triggerCond,
+            expireCond,
+            actions).build();
+
+    return t;
+  }
+
   public static class AlwaysOnChecker implements ConditionChecker {
 
     public static final String type = "AlwaysOnChecker";
@@ -82,7 +145,7 @@ public class TriggerManagerDeadlockTest {
     private final String id;
     private final Boolean alwaysOn;
 
-    public AlwaysOnChecker(String id, Boolean alwaysOn) {
+    public AlwaysOnChecker(final String id, final Boolean alwaysOn) {
       this.id = id;
       this.alwaysOn = alwaysOn;
     }
@@ -90,7 +153,7 @@ public class TriggerManagerDeadlockTest {
     @Override
     public Object eval() {
       // TODO Auto-generated method stub
-      return alwaysOn;
+      return this.alwaysOn;
     }
 
     @Override
@@ -107,7 +170,7 @@ public class TriggerManagerDeadlockTest {
 
     @Override
     public String getId() {
-      return id;
+      return this.id;
     }
 
     @Override
@@ -117,7 +180,7 @@ public class TriggerManagerDeadlockTest {
     }
 
     @Override
-    public ConditionChecker fromJson(Object obj) throws Exception {
+    public ConditionChecker fromJson(final Object obj) throws Exception {
       // TODO Auto-generated method stub
       return null;
     }
@@ -135,7 +198,7 @@ public class TriggerManagerDeadlockTest {
     }
 
     @Override
-    public void setContext(Map<String, Object> context) {
+    public void setContext(final Map<String, Object> context) {
       // TODO Auto-generated method stub
 
     }
@@ -148,67 +211,4 @@ public class TriggerManagerDeadlockTest {
 
   }
 
-  private Trigger createSelfRegenTrigger() {
-    ConditionChecker alwaysOnChecker =
-        new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
-    String triggerExpr = alwaysOnChecker.getId() + ".eval()";
-    Map<String, ConditionChecker> triggerCheckers =
-        new HashMap<String, ConditionChecker>();
-    triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
-    Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
-
-    TriggerAction triggerAct =
-        new CreateTriggerAction("dummyTrigger", createDummyTrigger());
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    actions.add(triggerAct);
-
-    ConditionChecker alwaysOffChecker =
-        new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
-    String expireExpr = alwaysOffChecker.getId() + ".eval()";
-    Map<String, ConditionChecker> expireCheckers =
-        new HashMap<String, ConditionChecker>();
-    expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
-    Condition expireCond = new Condition(expireCheckers, expireExpr);
-
-    Trigger t =
-        new Trigger.TriggerBuilder("azkaban",
-            "azkabanTest",
-            triggerCond,
-            expireCond,
-            actions).build();
-
-    return t;
-  }
-
-  private Trigger createDummyTrigger() {
-    ConditionChecker alwaysOnChecker =
-        new AlwaysOnChecker("alwaysOn", Boolean.TRUE);
-    String triggerExpr = alwaysOnChecker.getId() + ".eval()";
-    Map<String, ConditionChecker> triggerCheckers =
-        new HashMap<String, ConditionChecker>();
-    triggerCheckers.put(alwaysOnChecker.getId(), alwaysOnChecker);
-    Condition triggerCond = new Condition(triggerCheckers, triggerExpr);
-
-    TriggerAction triggerAct = new DummyTriggerAction("howdy!");
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    actions.add(triggerAct);
-
-    ConditionChecker alwaysOffChecker =
-        new AlwaysOnChecker("alwaysOff", Boolean.FALSE);
-    String expireExpr = alwaysOffChecker.getId() + ".eval()";
-    Map<String, ConditionChecker> expireCheckers =
-        new HashMap<String, ConditionChecker>();
-    expireCheckers.put(alwaysOffChecker.getId(), alwaysOffChecker);
-    Condition expireCond = new Condition(expireCheckers, expireExpr);
-
-    Trigger t =
-        new Trigger.TriggerBuilder("azkaban",
-                                   "azkabanTest",
-                                   triggerCond,
-                                   expireCond,
-                                   actions).build();
-
-    return t;
-  }
-
 }
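The rewritten helper methods above use the diamond operator and final locals instead of repeating generic type arguments. A minimal standalone sketch of that convention, using placeholder names rather than the Azkaban trigger classes:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class DiamondSketch {
  public static void main(final String[] args) {
    // Old style: new HashMap<String, ConditionChecker>() / new ArrayList<TriggerAction>()
    // Reformatted style: type arguments inferred via <>, locals declared final.
    final Map<String, String> checkers = new HashMap<>();
    final List<String> actions = new ArrayList<>();
    checkers.put("alwaysOn", "alwaysOn.eval()");
    actions.add("dummyTriggerAction");
    System.out.println(checkers + " " + actions);
  }
}
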
diff --git a/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerTest.java b/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerTest.java
index 33577fe..b3940bf 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/TriggerManagerTest.java
@@ -16,27 +16,25 @@
 
 package azkaban.trigger;
 
+import static org.junit.Assert.assertTrue;
+
+import azkaban.utils.Props;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
-import azkaban.utils.Props;
-
 public class TriggerManagerTest {
 
   private TriggerLoader triggerLoader;
 
   @Before
   public void setup() throws TriggerException, TriggerManagerException {
-    triggerLoader = new MockTriggerLoader();
+    this.triggerLoader = new MockTriggerLoader();
 
   }
 
@@ -45,12 +43,13 @@ public class TriggerManagerTest {
 
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void triggerManagerSimpleTest() throws TriggerManagerException {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("trigger.scan.interval", 4000);
-    TriggerManager triggerManager =
-        new TriggerManager(props, triggerLoader, null);
+    final TriggerManager triggerManager =
+        new TriggerManager(props, this.triggerLoader, null);
 
     triggerManager.registerCheckerType(ThresholdChecker.type,
         ThresholdChecker.class);
@@ -63,25 +62,25 @@ public class TriggerManagerTest {
         createDummyTrigger("test1", "triggerLoader", 10), "testUser");
     List<Trigger> triggers = triggerManager.getTriggers();
     assertTrue(triggers.size() == 1);
-    Trigger t1 = triggers.get(0);
+    final Trigger t1 = triggers.get(0);
     t1.setResetOnTrigger(false);
     triggerManager.updateTrigger(t1, "testUser");
-    ThresholdChecker checker1 =
+    final ThresholdChecker checker1 =
         (ThresholdChecker) t1.getTriggerCondition().getCheckers().values()
             .toArray()[0];
     assertTrue(t1.getSource().equals("triggerLoader"));
 
-    Trigger t2 =
+    final Trigger t2 =
         createDummyTrigger("test2: add new trigger", "addNewTriggerTest", 20);
     triggerManager.insertTrigger(t2, "testUser");
-    ThresholdChecker checker2 =
+    final ThresholdChecker checker2 =
         (ThresholdChecker) t2.getTriggerCondition().getCheckers().values()
             .toArray()[0];
 
     ThresholdChecker.setVal(15);
     try {
       Thread.sleep(2000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -93,7 +92,7 @@ public class TriggerManagerTest {
 
     try {
       Thread.sleep(2000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -106,7 +105,7 @@ public class TriggerManagerTest {
     ThresholdChecker.setVal(25);
     try {
       Thread.sleep(4000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -120,41 +119,71 @@ public class TriggerManagerTest {
 
   }
 
+  private Trigger createDummyTrigger(final String message, final String source,
+      final int threshold) {
+
+    final Map<String, ConditionChecker> checkers =
+        new HashMap<>();
+    final ConditionChecker checker =
+        new ThresholdChecker(ThresholdChecker.type, threshold);
+    checkers.put(checker.getId(), checker);
+
+    final List<TriggerAction> actions = new ArrayList<>();
+    final TriggerAction act = new DummyTriggerAction(message);
+    actions.add(act);
+
+    final String expr = checker.getId() + ".eval()";
+
+    final Condition triggerCond = new Condition(checkers, expr);
+    final Condition expireCond = new Condition(checkers, expr);
+
+    final Trigger fakeTrigger = new Trigger.TriggerBuilder("azkaban",
+        source,
+        triggerCond,
+        expireCond,
+        actions).build();
+
+    fakeTrigger.setResetOnTrigger(true);
+    fakeTrigger.setResetOnExpire(true);
+
+    return fakeTrigger;
+  }
+
   public static class MockTriggerLoader implements TriggerLoader {
 
-    private Map<Integer, Trigger> triggers = new HashMap<Integer, Trigger>();
+    private final Map<Integer, Trigger> triggers = new HashMap<>();
     private int idIndex = 0;
 
     @Override
-    public void addTrigger(Trigger t) throws TriggerLoaderException {
-      t.setTriggerId(idIndex++);
-      triggers.put(t.getTriggerId(), t);
+    public void addTrigger(final Trigger t) throws TriggerLoaderException {
+      t.setTriggerId(this.idIndex++);
+      this.triggers.put(t.getTriggerId(), t);
     }
 
     @Override
-    public void removeTrigger(Trigger s) throws TriggerLoaderException {
-      triggers.remove(s.getTriggerId());
+    public void removeTrigger(final Trigger s) throws TriggerLoaderException {
+      this.triggers.remove(s.getTriggerId());
 
     }
 
     @Override
-    public void updateTrigger(Trigger t) throws TriggerLoaderException {
-      triggers.put(t.getTriggerId(), t);
+    public void updateTrigger(final Trigger t) throws TriggerLoaderException {
+      this.triggers.put(t.getTriggerId(), t);
     }
 
     @Override
     public List<Trigger> loadTriggers() {
-      return new ArrayList<Trigger>(triggers.values());
+      return new ArrayList<>(this.triggers.values());
     }
 
     @Override
-    public Trigger loadTrigger(int triggerId) throws TriggerLoaderException {
+    public Trigger loadTrigger(final int triggerId) throws TriggerLoaderException {
       // TODO Auto-generated method stub
       return null;
     }
 
     @Override
-    public List<Trigger> getUpdatedTriggers(long lastUpdateTime)
+    public List<Trigger> getUpdatedTriggers(final long lastUpdateTime)
         throws TriggerLoaderException {
       // TODO Auto-generated method stub
       return null;
@@ -162,36 +191,6 @@ public class TriggerManagerTest {
 
   }
 
-  private Trigger createDummyTrigger(String message, String source,
-      int threshold) {
-
-    Map<String, ConditionChecker> checkers =
-        new HashMap<String, ConditionChecker>();
-    ConditionChecker checker =
-        new ThresholdChecker(ThresholdChecker.type, threshold);
-    checkers.put(checker.getId(), checker);
-
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    TriggerAction act = new DummyTriggerAction(message);
-    actions.add(act);
-
-    String expr = checker.getId() + ".eval()";
-
-    Condition triggerCond = new Condition(checkers, expr);
-    Condition expireCond = new Condition(checkers, expr);
-
-    Trigger fakeTrigger = new Trigger.TriggerBuilder("azkaban",
-        source,
-        triggerCond,
-        expireCond,
-        actions).build();
-
-    fakeTrigger.setResetOnTrigger(true);
-    fakeTrigger.setResetOnExpire(true);
-
-    return fakeTrigger;
-  }
-
   // public class MockCheckerLoader extends CheckerTypeLoader{
   //
   // @Override
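The TriggerManagerTest hunks above also mark method parameters and catch variables as final. A small illustrative sketch of the same pattern (the method and values are hypothetical, not part of Azkaban):

public class FinalParamsSketch {

  // Parameters declared final cannot be reassigned inside the method body.
  static int parseIntervalMillis(final String value, final int fallback) {
    try {
      return Integer.parseInt(value);
    } catch (final NumberFormatException e) { // the catch variable is final as well
      return fallback;
    }
  }

  public static void main(final String[] args) {
    System.out.println(parseIntervalMillis("4000", 1000)); // prints 4000
    System.out.println(parseIntervalMillis("oops", 1000)); // prints 1000
  }
}
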
diff --git a/azkaban-common/src/test/java/azkaban/trigger/TriggerTest.java b/azkaban-common/src/test/java/azkaban/trigger/TriggerTest.java
index 96f61fe..fdc974e 100644
--- a/azkaban-common/src/test/java/azkaban/trigger/TriggerTest.java
+++ b/azkaban-common/src/test/java/azkaban/trigger/TriggerTest.java
@@ -16,27 +16,24 @@
 
 package azkaban.trigger;
 
+import static org.junit.Assert.assertTrue;
+
+import azkaban.executor.ExecutionOptions;
+import azkaban.trigger.builtin.BasicTimeChecker;
+import azkaban.trigger.builtin.ExecuteFlowAction;
+import azkaban.utils.JSONUtils;
+import azkaban.utils.Props;
+import azkaban.utils.Utils;
 import java.io.File;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.joda.time.DateTime;
-
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
-import azkaban.executor.ExecutionOptions;
-import azkaban.trigger.builtin.BasicTimeChecker;
-import azkaban.trigger.builtin.ExecuteFlowAction;
-import azkaban.utils.JSONUtils;
-import azkaban.utils.Props;
-import azkaban.utils.Utils;
-
 public class TriggerTest {
 
   private CheckerTypeLoader checkerLoader;
@@ -44,44 +41,45 @@ public class TriggerTest {
 
   @Before
   public void setup() throws TriggerException {
-    checkerLoader = new CheckerTypeLoader();
-    checkerLoader.init(new Props());
-    Condition.setCheckerLoader(checkerLoader);
-    actionLoader = new ActionTypeLoader();
-    actionLoader.init(new Props());
-    Trigger.setActionTypeLoader(actionLoader);
+    this.checkerLoader = new CheckerTypeLoader();
+    this.checkerLoader.init(new Props());
+    Condition.setCheckerLoader(this.checkerLoader);
+    this.actionLoader = new ActionTypeLoader();
+    this.actionLoader.init(new Props());
+    Trigger.setActionTypeLoader(this.actionLoader);
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void jsonConversionTest() throws Exception {
-    DateTime now = DateTime.now();
-    ConditionChecker checker1 =
+    final DateTime now = DateTime.now();
+    final ConditionChecker checker1 =
         new BasicTimeChecker("timeChecker1", now.getMillis(), now.getZone(),
             true, true, Utils.parsePeriodString("1h"), null);
-    Map<String, ConditionChecker> checkers1 =
-        new HashMap<String, ConditionChecker>();
+    final Map<String, ConditionChecker> checkers1 =
+        new HashMap<>();
     checkers1.put(checker1.getId(), checker1);
-    String expr1 = checker1.getId() + ".eval()";
-    Condition triggerCond = new Condition(checkers1, expr1);
-    Condition expireCond = new Condition(checkers1, expr1);
-    List<TriggerAction> actions = new ArrayList<TriggerAction>();
-    TriggerAction action =
+    final String expr1 = checker1.getId() + ".eval()";
+    final Condition triggerCond = new Condition(checkers1, expr1);
+    final Condition expireCond = new Condition(checkers1, expr1);
+    final List<TriggerAction> actions = new ArrayList<>();
+    final TriggerAction action =
         new ExecuteFlowAction("executeAction", 1, "testProj", "testFlow",
             "azkaban", new ExecutionOptions(), null);
     actions.add(action);
 
-    Trigger t = new Trigger.TriggerBuilder("azkaban",
+    final Trigger t = new Trigger.TriggerBuilder("azkaban",
         "test",
         triggerCond,
         expireCond,
         actions).build();
 
-    File temp = File.createTempFile("temptest", "temptest");
+    final File temp = File.createTempFile("temptest", "temptest");
     temp.deleteOnExit();
-    Object obj = t.toJson();
+    final Object obj = t.toJson();
     JSONUtils.toJSON(obj, temp);
 
-    Trigger t2 = Trigger.fromJson(JSONUtils.parseJSONFromFile(temp));
+    final Trigger t2 = Trigger.fromJson(JSONUtils.parseJSONFromFile(temp));
 
     assertTrue(t.getSource().equals(t2.getSource()));
     assertTrue(t.getTriggerId() == t2.getTriggerId());
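TriggerTest's import block now follows the layout used across these hunks: static imports first, then regular imports in alphabetical order, with no wildcard import static org.junit.Assert.* and no blank-line groups. Shown on a hypothetical test class, assuming JUnit 4 on the classpath:

import static org.junit.Assert.assertTrue;

import java.util.ArrayList;
import java.util.List;
import org.junit.Test;

public class ImportOrderSketch {

  @Test
  public void listStartsEmpty() {
    final List<String> names = new ArrayList<>();
    assertTrue(names.isEmpty());
  }
}
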
diff --git a/azkaban-common/src/test/java/azkaban/user/PermissionTest.java b/azkaban-common/src/test/java/azkaban/user/PermissionTest.java
index e34a5b4..07497b3 100644
--- a/azkaban-common/src/test/java/azkaban/user/PermissionTest.java
+++ b/azkaban-common/src/test/java/azkaban/user/PermissionTest.java
@@ -16,15 +16,15 @@
 
 package azkaban.user;
 
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-
 import static org.junit.Assert.assertTrue;
 
 import azkaban.user.Permission.Type;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
 
 public class PermissionTest {
+
   @Before
   public void setUp() throws Exception {
   }
@@ -35,16 +35,16 @@ public class PermissionTest {
 
   @Test
   public void testEmptyPermissionCreation() throws Exception {
-    Permission permission = new Permission();
-    permission.addPermissionsByName(new String[] {});
+    final Permission permission = new Permission();
+    permission.addPermissionsByName(new String[]{});
   }
 
   @Test
   public void testSinglePermissionCreation() throws Exception {
-    Permission perm1 = new Permission();
+    final Permission perm1 = new Permission();
     perm1.addPermissionsByName("READ");
 
-    Permission perm2 = new Permission();
+    final Permission perm2 = new Permission();
     perm2.addPermission(Type.READ);
     info("Compare " + perm1.toString() + " and " + perm2.toString());
     assertTrue(perm1.equals(perm2));
@@ -52,58 +52,58 @@ public class PermissionTest {
 
   @Test
   public void testListPermissionCreation() throws Exception {
-    Permission perm1 = new Permission();
-    perm1.addPermissionsByName(new String[] { "READ", "EXECUTE" });
+    final Permission perm1 = new Permission();
+    perm1.addPermissionsByName(new String[]{"READ", "EXECUTE"});
 
-    Permission perm2 = new Permission();
-    perm2.addPermission(new Type[] { Type.EXECUTE, Type.READ });
+    final Permission perm2 = new Permission();
+    perm2.addPermission(new Type[]{Type.EXECUTE, Type.READ});
     info("Compare " + perm1.toString() + " and " + perm2.toString());
     assertTrue(perm1.equals(perm2));
   }
 
   @Test
   public void testRemovePermission() throws Exception {
-    Permission perm1 = new Permission();
-    perm1.addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+    final Permission perm1 = new Permission();
+    perm1.addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
     perm1.removePermissions(Type.EXECUTE);
 
-    Permission perm2 = new Permission();
-    perm2.addPermission(new Type[] { Type.READ, Type.WRITE });
+    final Permission perm2 = new Permission();
+    perm2.addPermission(new Type[]{Type.READ, Type.WRITE});
     info("Compare " + perm1.toString() + " and " + perm2.toString());
     assertTrue(perm1.equals(perm2));
   }
 
   @Test
   public void testRemovePermissionByName() throws Exception {
-    Permission perm1 = new Permission();
-    perm1.addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+    final Permission perm1 = new Permission();
+    perm1.addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
     perm1.removePermissionsByName("EXECUTE");
 
-    Permission perm2 = new Permission();
-    perm2.addPermission(new Type[] { Type.READ, Type.WRITE });
+    final Permission perm2 = new Permission();
+    perm2.addPermission(new Type[]{Type.READ, Type.WRITE});
     info("Compare " + perm1.toString() + " and " + perm2.toString());
     assertTrue(perm1.equals(perm2));
   }
 
   @Test
   public void testToAndFromObject() throws Exception {
-    Permission permission = new Permission();
+    final Permission permission = new Permission();
     permission
-        .addPermissionsByName(new String[] { "READ", "EXECUTE", "WRITE" });
+        .addPermissionsByName(new String[]{"READ", "EXECUTE", "WRITE"});
 
-    String[] array = permission.toStringArray();
-    Permission permission2 = new Permission();
+    final String[] array = permission.toStringArray();
+    final Permission permission2 = new Permission();
     permission2.addPermissionsByName(array);
     assertTrue(permission.equals(permission2));
   }
 
   @Test
   public void testFlags() throws Exception {
-    Permission permission = new Permission();
-    permission.addPermission(new Type[] { Type.READ, Type.WRITE });
+    final Permission permission = new Permission();
+    permission.addPermission(new Type[]{Type.READ, Type.WRITE});
 
-    int flags = permission.toFlags();
-    Permission permission2 = new Permission(flags);
+    final int flags = permission.toFlags();
+    final Permission permission2 = new Permission(flags);
 
     assertTrue(permission2.isPermissionSet(Type.READ));
     assertTrue(permission2.isPermissionSet(Type.WRITE));
@@ -113,10 +113,8 @@ public class PermissionTest {
 
   /**
    * Why? Because it's quicker.
-   *
-   * @param message
    */
-  public void info(String message) {
+  public void info(final String message) {
     System.out.println(message);
   }
 }
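The PermissionTest hunks tighten array-initializer spacing from new String[] { "READ", "EXECUTE" } to new String[]{"READ", "EXECUTE"}. A minimal sketch of the reformatted layout:

public class ArrayInitSketch {

  public static void main(final String[] args) {
    // No space between the brackets and the brace, none just inside the braces.
    final String[] permissions = new String[]{"READ", "EXECUTE", "WRITE"};
    System.out.println(String.join(",", permissions)); // READ,EXECUTE,WRITE
  }
}
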
diff --git a/azkaban-common/src/test/java/azkaban/user/XmlUserManagerTest.java b/azkaban-common/src/test/java/azkaban/user/XmlUserManagerTest.java
index d5d93c2..823a52b 100644
--- a/azkaban-common/src/test/java/azkaban/user/XmlUserManagerTest.java
+++ b/azkaban-common/src/test/java/azkaban/user/XmlUserManagerTest.java
@@ -16,20 +16,19 @@
 
 package azkaban.user;
 
-import java.util.HashSet;
+import static org.junit.Assert.fail;
 
+import azkaban.utils.Props;
+import azkaban.utils.UndefinedPropertyException;
+import java.util.HashSet;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import static org.junit.Assert.fail;
-
-import azkaban.utils.Props;
-import azkaban.utils.UndefinedPropertyException;
-
 public class XmlUserManagerTest {
-  private Props baseProps = new Props();
+
+  private final Props baseProps = new Props();
 
   @Before
   public void setUp() throws Exception {
@@ -41,18 +40,15 @@ public class XmlUserManagerTest {
 
   /**
    * Testing for when the xml path isn't set in properties.
-   *
-   * @throws Exception
    */
   @Test
   public void testFilePropNotSet() throws Exception {
-    Props props = new Props(baseProps);
+    final Props props = new Props(this.baseProps);
 
     // Should throw
     try {
-      @SuppressWarnings("unused")
-      XmlUserManager manager = new XmlUserManager(props);
-    } catch (UndefinedPropertyException e) {
+      final XmlUserManager manager = new XmlUserManager(props);
+    } catch (final UndefinedPropertyException e) {
       return;
     }
 
@@ -61,101 +57,100 @@ public class XmlUserManagerTest {
 
   /**
    * Testing for when the xml path doesn't exist.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testDoNotExist() throws Exception {
-    Props props = new Props(baseProps);
+    final Props props = new Props(this.baseProps);
     props.put(XmlUserManager.XML_FILE_PARAM, "unit/test-conf/doNotExist.xml");
 
     try {
-      @SuppressWarnings("unused")
-      UserManager manager = new XmlUserManager(props);
-    } catch (RuntimeException e) {
+      final UserManager manager = new XmlUserManager(props);
+    } catch (final RuntimeException e) {
       return;
     }
 
     fail("XmlUserManager should throw an exception when the file doesn't exist");
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicLoad() throws Exception {
-    Props props = new Props(baseProps);
+    final Props props = new Props(this.baseProps);
     props.put(XmlUserManager.XML_FILE_PARAM,
         "unit/test-conf/azkaban-users-test1.xml");
 
     UserManager manager = null;
     try {
       manager = new XmlUserManager(props);
-    } catch (RuntimeException e) {
+    } catch (final RuntimeException e) {
       e.printStackTrace();
       fail("XmlUserManager should've found file azkaban-users.xml");
     }
 
     try {
       manager.getUser("user0", null);
-    } catch (UserManagerException e) {
+    } catch (final UserManagerException e) {
       System.out.println("Exception handled correctly: " + e.getMessage());
     }
 
     try {
       manager.getUser(null, "etw");
-    } catch (UserManagerException e) {
+    } catch (final UserManagerException e) {
       System.out.println("Exception handled correctly: " + e.getMessage());
     }
 
     try {
       manager.getUser("user0", "user0");
-    } catch (UserManagerException e) {
+    } catch (final UserManagerException e) {
       System.out.println("Exception handled correctly: " + e.getMessage());
     }
 
     try {
       manager.getUser("user0", "password0");
-    } catch (UserManagerException e) {
+    } catch (final UserManagerException e) {
       e.printStackTrace();
       fail("XmlUserManager should've returned a user.");
     }
 
-    User user0 = manager.getUser("user0", "password0");
+    final User user0 = manager.getUser("user0", "password0");
     checkUser(user0, "role0", "group0");
 
-    User user1 = manager.getUser("user1", "password1");
+    final User user1 = manager.getUser("user1", "password1");
     checkUser(user1, "role0,role1", "group1,group2");
 
-    User user2 = manager.getUser("user2", "password2");
+    final User user2 = manager.getUser("user2", "password2");
     checkUser(user2, "role0,role1,role2", "group1,group2,group3");
 
-    User user3 = manager.getUser("user3", "password3");
+    final User user3 = manager.getUser("user3", "password3");
     checkUser(user3, "role1,role2", "group1,group2");
 
-    User user4 = manager.getUser("user4", "password4");
+    final User user4 = manager.getUser("user4", "password4");
     checkUser(user4, "role1,role2", "group1,group2");
 
-    User user5 = manager.getUser("user5", "password5");
+    final User user5 = manager.getUser("user5", "password5");
     checkUser(user5, "role1,role2", "group1,group2");
 
-    User user6 = manager.getUser("user6", "password6");
+    final User user6 = manager.getUser("user6", "password6");
     checkUser(user6, "role3,role2", "group1,group2");
 
-    User user7 = manager.getUser("user7", "password7");
+    final User user7 = manager.getUser("user7", "password7");
     checkUser(user7, "", "group1");
 
-    User user8 = manager.getUser("user8", "password8");
+    final User user8 = manager.getUser("user8", "password8");
     checkUser(user8, "role3", "");
 
-    User user9 = manager.getUser("user9", "password9");
+    final User user9 = manager.getUser("user9", "password9");
     checkUser(user9, "", "");
   }
 
-  private void checkUser(User user, String rolesStr, String groupsStr) {
+  private void checkUser(final User user, final String rolesStr, final String groupsStr) {
     // Validating roles
-    HashSet<String> roleSet = new HashSet<String>(user.getRoles());
+    final HashSet<String> roleSet = new HashSet<>(user.getRoles());
     if (rolesStr.isEmpty()) {
       if (!roleSet.isEmpty()) {
         String outputRoleStr = "";
-        for (String role : roleSet) {
+        for (final String role : roleSet) {
           outputRoleStr += role + ",";
         }
         throw new RuntimeException("Roles mismatch for " + user.getUserId()
@@ -163,13 +158,13 @@ public class XmlUserManagerTest {
       }
     } else {
       String outputRoleStr = "";
-      for (String role : roleSet) {
+      for (final String role : roleSet) {
         outputRoleStr += role + ",";
       }
 
-      String[] splitRoles = rolesStr.split(",");
-      HashSet<String> expectedRoles = new HashSet<String>();
-      for (String role : splitRoles) {
+      final String[] splitRoles = rolesStr.split(",");
+      final HashSet<String> expectedRoles = new HashSet<>();
+      for (final String role : splitRoles) {
         if (!roleSet.contains(role)) {
           throw new RuntimeException("Roles mismatch for user "
               + user.getUserId() + " role " + role + ". Expected roles to "
@@ -178,7 +173,7 @@ public class XmlUserManagerTest {
         expectedRoles.add(role);
       }
 
-      for (String role : roleSet) {
+      for (final String role : roleSet) {
         if (!expectedRoles.contains(role)) {
           throw new RuntimeException("Roles mismatch for user "
               + user.getUserId() + " role " + role + ". Expected roles to "
@@ -187,11 +182,11 @@ public class XmlUserManagerTest {
       }
     }
 
-    HashSet<String> groupSet = new HashSet<String>(user.getGroups());
+    final HashSet<String> groupSet = new HashSet<>(user.getGroups());
     if (groupsStr.isEmpty()) {
       if (!groupSet.isEmpty()) {
         String outputGroupStr = "";
-        for (String role : roleSet) {
+        for (final String role : roleSet) {
           outputGroupStr += role + ",";
         }
         throw new RuntimeException("Roles mismatch for " + user.getUserId()
@@ -199,13 +194,13 @@ public class XmlUserManagerTest {
       }
     } else {
       String outputGroupStr = "";
-      for (String group : groupSet) {
+      for (final String group : groupSet) {
         outputGroupStr += group + ",";
       }
 
-      String[] splitGroups = groupsStr.split(",");
-      HashSet<String> expectedGroups = new HashSet<String>();
-      for (String group : splitGroups) {
+      final String[] splitGroups = groupsStr.split(",");
+      final HashSet<String> expectedGroups = new HashSet<>();
+      for (final String group : splitGroups) {
         if (!groupSet.contains(group)) {
           throw new RuntimeException("Groups mismatch for user "
               + user.getUserId() + " group " + group + ". Expected groups to "
@@ -214,7 +209,7 @@ public class XmlUserManagerTest {
         expectedGroups.add(group);
       }
 
-      for (String group : groupSet) {
+      for (final String group : groupSet) {
         if (!expectedGroups.contains(group)) {
           throw new RuntimeException("Groups mismatch for user "
               + user.getUserId() + " group " + group + ". Expected groups to "
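XmlUserManagerTest illustrates two further conventions applied throughout: instance fields are always read through this., and enhanced-for variables are declared final. A small self-contained sketch under those conventions (the field and methods are placeholders):

import java.util.HashSet;
import java.util.Set;

public class ThisQualifierSketch {

  private final Set<String> roles = new HashSet<>();

  void addRole(final String role) {
    this.roles.add(role); // field access is qualified with this.
  }

  String joinRoles() {
    String out = "";
    for (final String role : this.roles) { // loop variable is final
      out += role + ",";
    }
    return out;
  }

  public static void main(final String[] args) {
    final ThisQualifierSketch sketch = new ThisQualifierSketch();
    sketch.addRole("role0");
    System.out.println(sketch.joinRoles()); // prints role0,
  }
}
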
diff --git a/azkaban-common/src/test/java/azkaban/utils/AbstractMailerTest.java b/azkaban-common/src/test/java/azkaban/utils/AbstractMailerTest.java
index 5581201..c1ef290 100644
--- a/azkaban-common/src/test/java/azkaban/utils/AbstractMailerTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/AbstractMailerTest.java
@@ -15,58 +15,58 @@
  */
 package azkaban.utils;
 
-import org.junit.Before;
-import org.junit.Test;
-
 import java.util.ArrayList;
 import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
 
 public class AbstractMailerTest {
 
-    List<String> senderList = new ArrayList<String>();
+  List<String> senderList = new ArrayList<>();
 
+  public static Props createMailProperties() {
+    final Props props = new Props();
+    props.put("mail.user", "somebody");
+    props.put("mail.password", "pwd");
+    props.put("mail.sender", "somebody@xxx.com");
+    props.put("server.port", "114");
+    props.put("jetty.use.ssl", "false");
+    props.put("server.useSSL", "false");
+    props.put("jetty.port", "8786");
+    return props;
 
-    @Before
-    public void setUp() throws Exception {
-        senderList.add("sender@domain.com");
-    }
-    /**
-     * test emailMessage properties
-     */
-    @Test
-    public void testCreateEmailMessage(){
+  }
 
-        Props props = createMailProperties();
-        props.put("mail.port","445");
-        AbstractMailer mailer = new AbstractMailer(props);
-        EmailMessage emailMessage = mailer.createEmailMessage("subject","text/html",senderList);
+  @Before
+  public void setUp() throws Exception {
+    this.senderList.add("sender@domain.com");
+  }
 
-        assert emailMessage.getMailPort()==445;
+  /**
+   * Tests the emailMessage properties.
+   */
+  @Test
+  public void testCreateEmailMessage() {
 
+    final Props props = createMailProperties();
+    props.put("mail.port", "445");
+    final AbstractMailer mailer = new AbstractMailer(props);
+    final EmailMessage emailMessage = mailer.createEmailMessage("subject", "text/html",
+        this.senderList);
 
-    }
+    assert emailMessage.getMailPort() == 445;
 
-    @Test
-    public void testCreateDefaultEmailMessage()
-    {
-        Props defaultProps = createMailProperties();
-        AbstractMailer mailer = new AbstractMailer(defaultProps);
-        EmailMessage emailMessage = mailer.createEmailMessage("subject","text/html",senderList);
-        assert emailMessage.getMailPort()==25;
 
-    }
+  }
 
-    public static  Props  createMailProperties(){
-        Props props = new Props();
-        props.put("mail.user","somebody");
-        props.put("mail.password","pwd");
-        props.put("mail.sender","somebody@xxx.com");
-        props.put("server.port","114");
-        props.put("jetty.use.ssl","false");
-        props.put("server.useSSL","false");
-        props.put("jetty.port","8786");
-        return props;
+  @Test
+  public void testCreateDefaultEmailMessage() {
+    final Props defaultProps = createMailProperties();
+    final AbstractMailer mailer = new AbstractMailer(defaultProps);
+    final EmailMessage emailMessage = mailer.createEmailMessage("subject", "text/html",
+        this.senderList);
+    assert emailMessage.getMailPort() == 25;
 
-    }
+  }
 
 }
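AbstractMailerTest moves from 4-space to the project-wide 2-space indentation and hoists the static factory createMailProperties() above the instance members. A hypothetical factory in the same shape, using java.util.Properties instead of Azkaban's Props so the sketch stays self-contained:

import java.util.Properties;

public class MailPropsSketch {

  // Static factory first, two-space indentation; the keys mirror the ones in the diff.
  public static Properties createMailProperties() {
    final Properties props = new Properties();
    props.put("mail.user", "somebody");
    props.put("mail.password", "pwd");
    props.put("mail.sender", "somebody@xxx.com");
    props.put("mail.port", "445");
    return props;
  }

  public static void main(final String[] args) {
    System.out.println(createMailProperties().getProperty("mail.port")); // prints 445
  }
}
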
diff --git a/azkaban-common/src/test/java/azkaban/utils/EmailerTest.java b/azkaban-common/src/test/java/azkaban/utils/EmailerTest.java
index 95d81c2..f0e3563 100644
--- a/azkaban-common/src/test/java/azkaban/utils/EmailerTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/EmailerTest.java
@@ -20,101 +20,89 @@ import azkaban.flow.Flow;
 import azkaban.project.DirectoryFlowLoader;
 import azkaban.project.Project;
 import azkaban.test.executions.TestExecutions;
-import com.google.common.io.Resources;
+import java.util.ArrayList;
+import java.util.List;
 import org.apache.log4j.Logger;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Properties;
-
 
 public class EmailerTest {
 
-    String host = "smtp.domain.com";//smtp server address
-    int mailPort = 25;//smtp server port
-    String sender = "somebody@domain.com";//sender email address
-    String user = "somebody@domain.com";// the sender username
-    String password = "pwd"; //the sender password
-
-    String receiveAddr = "receive@domain.com";//receiver email address
-    List<String> receiveAddrList = new ArrayList<String>();
-
-    private Project project;
-    private Props props;
-
-
-
-
-    @Before
-    public void setUp() throws Exception {
-        receiveAddrList.add(receiveAddr);
-        project = new Project(11, "myTestProject");
-        Logger logger = Logger.getLogger(this.getClass());
-
-        props =  createMailProperties();
-        DirectoryFlowLoader loader = new DirectoryFlowLoader(props, logger);
-        loader.loadProjectFlow(project, TestExecutions.getFlowDir("embedded"));
-        Assert.assertEquals(0, loader.getErrors().size());
-        project.setFlows(loader.getFlowMap());
-        project.setVersion(123);
-    }
-
-
-    /**
-     * this is an integration test for Emailer sending  email.
-     * if you want to run this case and send email successfully,
-     * please remove @Ignore and make sure these variable{host,mailPort,password,receiveAddr} are set to real values.
-     * the test will currently succeed because email sending errors are caught,
-     * you need to manually verify that a real email is sent and received.
-     */
-    @Ignore
-    @Test
-    public void testSendEmail() throws Exception{
-
-        Flow flow = project.getFlow("jobe");
-        flow.addFailureEmails(receiveAddrList);
-        Assert.assertNotNull(flow);
-
-        ExecutableFlow exFlow = new ExecutableFlow(project, flow);
-        Emailer emailer = new Emailer(props);
-        emailer.sendErrorEmail(exFlow);
-
-    }
-    @Test
-    public void testCreateEmailMessage(){
-        Emailer emailer = new Emailer(props);
-        EmailMessage em = emailer.createEmailMessage("subject","text/html",receiveAddrList);
-        assert  em.getMailPort() == mailPort;
-
-    }
-
-
-
-
-    public   Props  createMailProperties(){
-        Props props = new Props();
-        props.put("mail.user",user);
-        props.put("mail.password",password);
-        props.put("mail.sender",sender);
-        props.put("mail.host",host);
-        props.put("mail.port",mailPort);
-        props.put("job.failure.email",receiveAddr);
-        props.put("server.port","114");
-        props.put("jetty.use.ssl","false");
-        props.put("server.useSSL","false");
-        props.put("jetty.port","8786");
-        return props;
-    }
-
-
-
+  String host = "smtp.domain.com"; // SMTP server address
+  int mailPort = 25; // SMTP server port
+  String sender = "somebody@domain.com"; // sender email address
+  String user = "somebody@domain.com"; // sender username
+  String password = "pwd"; // sender password
+
+  String receiveAddr = "receive@domain.com"; // receiver email address
+  List<String> receiveAddrList = new ArrayList<>();
+
+  private Project project;
+  private Props props;
+
+
+  @Before
+  public void setUp() throws Exception {
+    this.receiveAddrList.add(this.receiveAddr);
+    this.project = new Project(11, "myTestProject");
+    final Logger logger = Logger.getLogger(this.getClass());
+
+    this.props = createMailProperties();
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(this.props, logger);
+    loader.loadProjectFlow(this.project, TestExecutions.getFlowDir("embedded"));
+    Assert.assertEquals(0, loader.getErrors().size());
+    this.project.setFlows(loader.getFlowMap());
+    this.project.setVersion(123);
+  }
+
+
+  /**
+   * This is an integration test for the Emailer actually sending email. To run this case and send
+   * email successfully, remove @Ignore and set the variables {host, mailPort, password, receiveAddr}
+   * to real values. The test currently passes even when sending fails, because email errors are
+   * caught; you must verify manually that a real email was sent and received.
+   */
+  @Ignore
+  @Test
+  public void testSendEmail() throws Exception {
+
+    final Flow flow = this.project.getFlow("jobe");
+    flow.addFailureEmails(this.receiveAddrList);
+    Assert.assertNotNull(flow);
+
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
+    final Emailer emailer = new Emailer(this.props);
+    emailer.sendErrorEmail(exFlow);
+
+  }
+
+  @Test
+  public void testCreateEmailMessage() {
+    final Emailer emailer = new Emailer(this.props);
+    final EmailMessage em = emailer
+        .createEmailMessage("subject", "text/html", this.receiveAddrList);
+    assert em.getMailPort() == this.mailPort;
+
+  }
+
+
+  public Props createMailProperties() {
+    final Props props = new Props();
+    props.put("mail.user", this.user);
+    props.put("mail.password", this.password);
+    props.put("mail.sender", this.sender);
+    props.put("mail.host", this.host);
+    props.put("mail.port", this.mailPort);
+    props.put("job.failure.email", this.receiveAddr);
+    props.put("server.port", "114");
+    props.put("jetty.use.ssl", "false");
+    props.put("server.useSSL", "false");
+    props.put("jetty.port", "8786");
+    return props;
+  }
 
 
 }
diff --git a/azkaban-common/src/test/java/azkaban/utils/EmailMessageTest.java b/azkaban-common/src/test/java/azkaban/utils/EmailMessageTest.java
index 429c66b..927045a 100644
--- a/azkaban-common/src/test/java/azkaban/utils/EmailMessageTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/EmailMessageTest.java
@@ -16,14 +16,13 @@
 
 package azkaban.utils;
 
+import java.io.IOException;
+import javax.mail.MessagingException;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import javax.mail.MessagingException;
-import java.io.IOException;
-
 public class EmailMessageTest {
 
   String host = "";
@@ -38,8 +37,8 @@ public class EmailMessageTest {
 
   @Before
   public void setUp() throws Exception {
-    em = new EmailMessage(host, port, user, password);
-    em.setFromAddress(sender);
+    this.em = new EmailMessage(this.host, this.port, this.user, this.password);
+    this.em.setFromAddress(this.sender);
   }
 
   @After
@@ -49,13 +48,13 @@ public class EmailMessageTest {
   @Ignore
   @Test
   public void testSendEmail() throws IOException {
-    em.addToAddress(toAddr);
+    this.em.addToAddress(this.toAddr);
     // em.addToAddress("cyu@linkedin.com");
-    em.setSubject("azkaban test email");
-    em.setBody("azkaban test email");
+    this.em.setSubject("azkaban test email");
+    this.em.setBody("azkaban test email");
     try {
-      em.sendEmail();
-    } catch (MessagingException e) {
+      this.em.sendEmail();
+    } catch (final MessagingException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
diff --git a/azkaban-common/src/test/java/azkaban/utils/ExternalLinkUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/ExternalLinkUtilsTest.java
index 957759b..371b670 100644
--- a/azkaban-common/src/test/java/azkaban/utils/ExternalLinkUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/ExternalLinkUtilsTest.java
@@ -21,53 +21,41 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
 import azkaban.Constants;
-
 import javax.servlet.http.HttpServletRequest;
-
 import org.junit.Before;
 import org.junit.Test;
 
 public class ExternalLinkUtilsTest {
-  private Props azkProps;
-
-  private Props jobProps;
-
-  private String jobId;
-
-  private HttpServletRequest mockRequest;
 
   private static final String EXEC_URL = "http://localhost:8081/executor";
-
   private static final String EXEC_QUERY_STRING = "execid=1";
-
   private static final String EXTERNAL_ANALYZER_TOPIC = "elephant";
-
   private static final String EXTERNAL_ANALYZER_URL_VALID_FORMAT =
       "http://elephant.linkedin.com/search?q=${url}";
-
   private static final String EXTERNAL_ANALYZER_EXPECTED_URL =
       "http://elephant.linkedin.com/search?q="
           + "http%3A%2F%2Flocalhost%3A8081%2Fexecutor%3Fexecid%3D1";
-
   private static final String EXTERNAL_LOGVIEWER_TOPIC = "kibana";
-
   private static final String EXTERNAL_LOGVIEWER_URL_VALID_FORMAT =
       "http://kibana.linkedin.com/search?jobid=${jobid}&&execid=${execid}";
-
   private static final String EXTERNAL_LOGVIEWER_EXPECTED_URL =
       "http://kibana.linkedin.com/search?jobid=Some%20%2B%20job&&execid=1";
+  private Props azkProps;
+  private Props jobProps;
+  private String jobId;
+  private HttpServletRequest mockRequest;
 
   @Before
   public void setUp() {
     // Empty server configuration
-    azkProps = new Props();
+    this.azkProps = new Props();
 
     // Job configuration consisting of only an exec id and job id
-    jobProps = new Props();
-    jobProps.put(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID, 1);
-    jobId = "Some + job";
+    this.jobProps = new Props();
+    this.jobProps.put(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID, 1);
+    this.jobId = "Some + job";
 
-    mockRequest = mock(HttpServletRequest.class);
+    this.mockRequest = mock(HttpServletRequest.class);
   }
 
   /**
@@ -76,16 +64,18 @@ public class ExternalLinkUtilsTest {
    */
   @Test
   public void testGetExternalAnalyzerValidFormat() {
-    azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_TOPIC, EXTERNAL_ANALYZER_TOPIC);
-    azkProps.put(
-        Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL.replace("${topic}", EXTERNAL_ANALYZER_TOPIC),
+    this.azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_TOPIC,
+        EXTERNAL_ANALYZER_TOPIC);
+    this.azkProps.put(
+        Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL
+            .replace("${topic}", EXTERNAL_ANALYZER_TOPIC),
         EXTERNAL_ANALYZER_URL_VALID_FORMAT);
 
-    when(mockRequest.getRequestURL()).thenReturn(new StringBuffer(EXEC_URL));
-    when(mockRequest.getQueryString()).thenReturn(EXEC_QUERY_STRING);
+    when(this.mockRequest.getRequestURL()).thenReturn(new StringBuffer(EXEC_URL));
+    when(this.mockRequest.getQueryString()).thenReturn(EXEC_QUERY_STRING);
 
-    String externalURL =
-        ExternalLinkUtils.getExternalAnalyzerOnReq(azkProps, mockRequest);
+    final String externalURL =
+        ExternalLinkUtils.getExternalAnalyzerOnReq(this.azkProps, this.mockRequest);
     assertTrue(externalURL.equals(EXTERNAL_ANALYZER_EXPECTED_URL));
   }
 
@@ -95,13 +85,15 @@ public class ExternalLinkUtilsTest {
    */
   @Test
   public void testGetExternalLogViewerValidFormat() {
-    azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC, EXTERNAL_LOGVIEWER_TOPIC);
-    azkProps.put(
-        Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL.replace("${topic}", EXTERNAL_LOGVIEWER_TOPIC),
+    this.azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_LOGVIEWER_TOPIC,
+        EXTERNAL_LOGVIEWER_TOPIC);
+    this.azkProps.put(
+        Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL
+            .replace("${topic}", EXTERNAL_LOGVIEWER_TOPIC),
         EXTERNAL_LOGVIEWER_URL_VALID_FORMAT);
 
-    String externalURL =
-        ExternalLinkUtils.getExternalLogViewer(azkProps, jobId, jobProps);
+    final String externalURL =
+        ExternalLinkUtils.getExternalLogViewer(this.azkProps, this.jobId, this.jobProps);
     assertTrue(externalURL.equals(EXTERNAL_LOGVIEWER_EXPECTED_URL));
   }
 
@@ -111,8 +103,8 @@ public class ExternalLinkUtilsTest {
    */
   @Test
   public void testGetExternalAnalyzerNotConfigured() {
-    String executionExternalLinkURL =
-        ExternalLinkUtils.getExternalAnalyzerOnReq(azkProps, mockRequest);
+    final String executionExternalLinkURL =
+        ExternalLinkUtils.getExternalAnalyzerOnReq(this.azkProps, this.mockRequest);
     assertTrue(executionExternalLinkURL.equals(""));
   }
 
@@ -122,8 +114,8 @@ public class ExternalLinkUtilsTest {
    */
   @Test
   public void testGetLogViewerNotConfigured() {
-    String executionExternalLinkURL =
-        ExternalLinkUtils.getExternalLogViewer(azkProps, jobId, jobProps);
+    final String executionExternalLinkURL =
+        ExternalLinkUtils.getExternalLogViewer(this.azkProps, this.jobId, this.jobProps);
     assertTrue(executionExternalLinkURL.equals(""));
   }
 
@@ -141,11 +133,14 @@ public class ExternalLinkUtilsTest {
   }
 
   /**
-   * Make sure that URLs for analyzers and logviewers are fetched correctly by setting it manually and then fetching them
+   * Make sure that URLs for analyzers and logviewers are fetched correctly by setting them manually
+   * and then fetching them.
    */
   @Test
   public void testFetchURL() {
-    azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL.replace("${topic}", "someTopic"), "This is a link");
-    assertTrue(ExternalLinkUtils.getURLForTopic("someTopic", azkProps).equals("This is a link"));
+    this.azkProps.put(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_TOPIC_URL
+        .replace("${topic}", "someTopic"), "This is a link");
+    assertTrue(
+        ExternalLinkUtils.getURLForTopic("someTopic", this.azkProps).equals("This is a link"));
   }
 }
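The ExternalLinkUtilsTest hunks wrap the long ${topic} substitutions onto their own lines. The substitution itself is plain String.replace on a property-key template; a minimal sketch with a placeholder key, not the real Constants value:

public class TopicUrlSketch {

  public static void main(final String[] args) {
    // Placeholder template; the real key lives in azkaban.Constants.
    final String keyTemplate = "azkaban.server.external.${topic}.url";
    final String key = keyTemplate.replace("${topic}", "elephant");
    System.out.println(key); // prints azkaban.server.external.elephant.url
  }
}
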
diff --git a/azkaban-common/src/test/java/azkaban/utils/FileIOUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/FileIOUtilsTest.java
index 884580f..0b9b6c3 100644
--- a/azkaban-common/src/test/java/azkaban/utils/FileIOUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/FileIOUtilsTest.java
@@ -16,6 +16,9 @@
 
 package azkaban.utils;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
@@ -25,70 +28,66 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.comparator.NameFileComparator;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.Test;
 import org.junit.Rule;
+import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.assertNotNull;
-
 public class FileIOUtilsTest {
-  private File sourceDir, destDir, baseDir;
+
   @Rule
   public TemporaryFolder temp = new TemporaryFolder();
+  private File sourceDir, destDir, baseDir;
 
   @Before
   public void setUp() throws Exception {
     // setup base dir
-    baseDir = temp.newFolder("base");
-    File file1 = new File(baseDir.getAbsolutePath()+"/a.out");
-    File file2 = new File(baseDir.getAbsolutePath()+"/testdir");
-    File file3 = new File(file2.getAbsolutePath()+"/b.out");
+    this.baseDir = this.temp.newFolder("base");
+    final File file1 = new File(this.baseDir.getAbsolutePath() + "/a.out");
+    final File file2 = new File(this.baseDir.getAbsolutePath() + "/testdir");
+    final File file3 = new File(file2.getAbsolutePath() + "/b.out");
     file1.createNewFile();
     file2.mkdir();
     file3.createNewFile();
 
-
-    byte[] fileData = new byte[]{1,2,3};
+    byte[] fileData = new byte[]{1, 2, 3};
     FileOutputStream out = new FileOutputStream(file1);
     out.write(fileData);
     out.close();
 
-    fileData = new byte[]{2,3,4};
+    fileData = new byte[]{2, 3, 4};
     out = new FileOutputStream(file3);
     out.write(fileData);
     out.close();
 
-    sourceDir = temp.newFolder("src");
-    FileUtils.copyDirectory(baseDir, sourceDir);
+    this.sourceDir = this.temp.newFolder("src");
+    FileUtils.copyDirectory(this.baseDir, this.sourceDir);
 
     // setup target dir
-    destDir = temp.newFolder("dest");
+    this.destDir = this.temp.newFolder("dest");
   }
 
   @After
   public void tearDown() throws Exception {
-    temp.delete();
-    FileUtils.deleteDirectory(baseDir);
-    FileUtils.deleteDirectory(sourceDir);
-    FileUtils.deleteDirectory(destDir);
+    this.temp.delete();
+    FileUtils.deleteDirectory(this.baseDir);
+    FileUtils.deleteDirectory(this.sourceDir);
+    FileUtils.deleteDirectory(this.destDir);
   }
 
   @Test
   public void testHardlinkCopy() throws IOException {
-    FileIOUtils.createDeepHardlink(sourceDir, destDir);
-    assertTrue(areDirsEqual(sourceDir, destDir, true));
-    FileUtils.deleteDirectory(destDir);
-    assertTrue(areDirsEqual(baseDir, sourceDir, true));
+    FileIOUtils.createDeepHardlink(this.sourceDir, this.destDir);
+    assertTrue(areDirsEqual(this.sourceDir, this.destDir, true));
+    FileUtils.deleteDirectory(this.destDir);
+    assertTrue(areDirsEqual(this.baseDir, this.sourceDir, true));
   }
 
   @Test
   public void testHardlinkCopyNonSource() {
     boolean exception = false;
     try {
-      FileIOUtils.createDeepHardlink(new File(sourceDir, "idonotexist"), destDir);
-    } catch (IOException e) {
+      FileIOUtils.createDeepHardlink(new File(this.sourceDir, "idonotexist"), this.destDir);
+    } catch (final IOException e) {
       System.out.println(e.getMessage());
       System.out.println("Handled this case nicely.");
       exception = true;
@@ -97,58 +96,64 @@ public class FileIOUtilsTest {
     assertTrue(exception);
   }
 
-  private boolean areDirsEqualUtil(File file1, File file2, boolean isRoot, boolean ignoreRoot) throws IOException {
-    if(!file1.getName().equals(file2.getName())) {
-      if(!isRoot && ignoreRoot) return false;
+  private boolean areDirsEqualUtil(final File file1, final File file2, final boolean isRoot,
+      final boolean ignoreRoot)
+      throws IOException {
+    if (!file1.getName().equals(file2.getName())) {
+      if (!isRoot && ignoreRoot) {
+        return false;
+      }
     }
-    if(file1.isDirectory() && file2.isDirectory()) {
-      if(file1.listFiles().length != file2.listFiles().length) {
+    if (file1.isDirectory() && file2.isDirectory()) {
+      if (file1.listFiles().length != file2.listFiles().length) {
         return false;
       }
-      File[] fileList1 = file1.listFiles(), fileList2 = file2.listFiles();
+      final File[] fileList1 = file1.listFiles();
+      final File[] fileList2 = file2.listFiles();
       Arrays.sort(fileList1, NameFileComparator.NAME_COMPARATOR);
       Arrays.sort(fileList2, NameFileComparator.NAME_COMPARATOR);
 
-      for(int i = 0; i < fileList1.length; i++) {
-        if(!areDirsEqualUtil(fileList1[i], fileList2[i], false, ignoreRoot)) {
+      for (int i = 0; i < fileList1.length; i++) {
+        if (!areDirsEqualUtil(fileList1[i], fileList2[i], false, ignoreRoot)) {
           return false;
         }
       }
       return true;
-    }
-    else if(file1.isFile() && file2.isFile()) {
+    } else if (file1.isFile() && file2.isFile()) {
       return file1.getName().equals(file2.getName()) && FileUtils.contentEquals(file1, file2);
+    } else {
+      return false;
     }
-    else return false;
   }
 
 
   // check if two dirs are structurally the same and contain files of the same content
-  private boolean areDirsEqual(File file1, File file2, boolean ignoreRoot) throws IOException {
+  private boolean areDirsEqual(final File file1, final File file2, final boolean ignoreRoot)
+      throws IOException {
     return areDirsEqualUtil(file1, file2, true, ignoreRoot);
   }
 
   @Test
   public void testAsciiUTF() throws IOException {
-    String foreignText = "abcdefghijklmnopqrstuvwxyz";
-    byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
+    final String foreignText = "abcdefghijklmnopqrstuvwxyz";
+    final byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
 
-    int length = utf8ByteArray.length;
+    final int length = utf8ByteArray.length;
     System.out.println("char length:" + foreignText.length() +
         " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
 
-    Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
+    final Pair<Integer, Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
         length - 6);
     System.out.println("Pair :" + pair.toString());
 
-    String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
+    final String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
     System.out.println("recreatedString:" + recreatedString);
 
-    String correctString = new String(utf8ByteArray, pair.getFirst(),
+    final String correctString = new String(utf8ByteArray, pair.getFirst(),
         pair.getSecond(), "UTF-8");
     System.out.println("correctString:" + correctString);
 
-    assertEquals(pair, new Pair<Integer,Integer>(1, 20));
+    assertEquals(pair, new Pair<>(1, 20));
     // Two characters stripped from this.
     assertEquals(correctString.length(), foreignText.length() - 6);
 
@@ -156,47 +161,46 @@ public class FileIOUtilsTest {
 
   @Test
   public void testForeignUTF() throws IOException {
-    String foreignText = "안녕하세요, 제 이름은 박병호입니다";
-    byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
+    final String foreignText = "안녕하세요, 제 이름은 박병호입니다";
+    final byte[] utf8ByteArray = createUTF8ByteArray(foreignText);
 
-    int length = utf8ByteArray.length;
+    final int length = utf8ByteArray.length;
     System.out.println("char length:" + foreignText.length()
         + " utf8BytesLength:" + utf8ByteArray.length + " for:" + foreignText);
 
-    Pair<Integer,Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
+    final Pair<Integer, Integer> pair = FileIOUtils.getUtf8Range(utf8ByteArray, 1,
         length - 6);
     System.out.println("Pair :" + pair.toString());
 
-    String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
+    final String recreatedString = new String(utf8ByteArray, 1, length - 6, "UTF-8");
     System.out.println("recreatedString:" + recreatedString);
 
     String correctString = new String(utf8ByteArray, pair.getFirst(),
         pair.getSecond(), "UTF-8");
     System.out.println("correctString:" + correctString);
 
-    assertEquals(pair, new Pair<Integer,Integer>(3, 40));
+    assertEquals(pair, new Pair<>(3, 40));
     // Two characters stripped from this.
     assertEquals(correctString.length(), foreignText.length() - 3);
 
-
     // Testing mixed bytes
-    String mixedText = "abc안녕하세요, 제 이름은 박병호입니다";
-    byte[] mixedBytes = createUTF8ByteArray(mixedText);
-    Pair<Integer,Integer> pair2 = FileIOUtils.getUtf8Range(mixedBytes, 1,
+    final String mixedText = "abc안녕하세요, 제 이름은 박병호입니다";
+    final byte[] mixedBytes = createUTF8ByteArray(mixedText);
+    final Pair<Integer, Integer> pair2 = FileIOUtils.getUtf8Range(mixedBytes, 1,
         length - 4);
     correctString = new String(mixedBytes, pair2.getFirst(), pair2.getSecond(),
         "UTF-8");
     System.out.println("correctString:" + correctString);
-    assertEquals(pair2, new Pair<Integer,Integer>(1, 45));
+    assertEquals(pair2, new Pair<>(1, 45));
     // Two characters stripped from this.
     assertEquals(correctString.length(), mixedText.length() - 3);
   }
 
-  private byte[] createUTF8ByteArray(String text) {
-    byte[] textBytes= null;
+  private byte[] createUTF8ByteArray(final String text) {
+    byte[] textBytes = null;
     try {
       textBytes = text.getBytes("UTF-8");
-    } catch (UnsupportedEncodingException e) {
+    } catch (final UnsupportedEncodingException e) {
       e.printStackTrace();
     }
     return textBytes;
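The FileIOUtilsTest hunks normalize control flow: every if body gets braces, a space follows the keyword, and else joins the closing brace. A tiny sketch of that brace style on a placeholder helper:

public class BraceStyleSketch {

  static boolean sameLength(final int[] a, final int[] b) {
    if (a == null || b == null) {
      return false; // single-statement bodies still get braces
    } else {
      return a.length == b.length; // "} else {" stays on one line
    }
  }

  public static void main(final String[] args) {
    System.out.println(sameLength(new int[]{1, 2}, new int[]{3, 4})); // prints true
  }
}
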
diff --git a/azkaban-common/src/test/java/azkaban/utils/JsonUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/JsonUtilsTest.java
index c48946b..e6a49f6 100644
--- a/azkaban-common/src/test/java/azkaban/utils/JsonUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/JsonUtilsTest.java
@@ -20,55 +20,53 @@ import java.io.IOException;
 import java.io.StringWriter;
 import java.util.HashMap;
 import java.util.Map;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 public class JsonUtilsTest {
+
+  private static void checkInAndOut(final Map<String, String> before,
+      final Map<String, String> after) {
+    for (final Map.Entry<String, String> entry : before.entrySet()) {
+      final String key = entry.getKey();
+      final String value = entry.getValue();
+
+      final String retValue = after.get(key);
+      Assert.assertEquals(value, retValue);
+    }
+  }
+
   @Test
   public void writePropsNoJarDependencyTest1() throws IOException {
-    Map<String, String> test = new HashMap<String, String>();
+    final Map<String, String> test = new HashMap<>();
     test.put("\"myTest\n\b", "myValue\t\\");
     test.put("normalKey", "Other key");
 
-    StringWriter writer = new StringWriter();
+    final StringWriter writer = new StringWriter();
     JSONUtils.writePropsNoJarDependency(test, writer);
 
-    String jsonStr = writer.toString();
+    final String jsonStr = writer.toString();
     System.out.println(writer.toString());
 
-    @SuppressWarnings("unchecked")
-    Map<String, String> result =
+    final Map<String, String> result =
         (Map<String, String>) JSONUtils.parseJSONFromString(jsonStr);
     checkInAndOut(test, result);
   }
 
   @Test
   public void writePropsNoJarDependencyTest2() throws IOException {
-    Map<String, String> test = new HashMap<String, String>();
+    final Map<String, String> test = new HashMap<>();
     test.put("\"myTest\n\b", "myValue\t\\");
 
-    StringWriter writer = new StringWriter();
+    final StringWriter writer = new StringWriter();
     JSONUtils.writePropsNoJarDependency(test, writer);
 
-    String jsonStr = writer.toString();
+    final String jsonStr = writer.toString();
     System.out.println(writer.toString());
 
-    @SuppressWarnings("unchecked")
-    Map<String, String> result =
+    final Map<String, String> result =
         (Map<String, String>) JSONUtils.parseJSONFromString(jsonStr);
     checkInAndOut(test, result);
   }
 
-  private static void checkInAndOut(Map<String, String> before,
-      Map<String, String> after) {
-    for (Map.Entry<String, String> entry : before.entrySet()) {
-      String key = entry.getKey();
-      String value = entry.getValue();
-
-      String retValue = after.get(key);
-      Assert.assertEquals(value, retValue);
-    }
-  }
-
 }
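JSONUtils.writePropsNoJarDependency, exercised above, writes a flat String map as JSON while escaping quotes, backslashes and control characters by hand. A rough, hypothetical sketch of such a dependency-free writer (the real JSONUtils implementation may differ):

    import java.io.IOException;
    import java.io.StringWriter;
    import java.io.Writer;
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class SimpleJsonPropsWriter {

      /** Writes a flat String map as one JSON object, escaping quotes, backslashes and control chars. */
      static void writeProps(final Map<String, String> props, final Writer writer) throws IOException {
        writer.write("{");
        boolean first = true;
        for (final Map.Entry<String, String> entry : props.entrySet()) {
          if (!first) {
            writer.write(",");
          }
          first = false;
          writer.write(quote(entry.getKey()) + ":" + quote(entry.getValue()));
        }
        writer.write("}");
      }

      private static String quote(final String s) {
        final StringBuilder sb = new StringBuilder("\"");
        for (final char c : s.toCharArray()) {
          switch (c) {
            case '"':  sb.append("\\\""); break;
            case '\\': sb.append("\\\\"); break;
            case '\n': sb.append("\\n");  break;
            case '\t': sb.append("\\t");  break;
            case '\b': sb.append("\\b");  break;
            default:   sb.append(c);
          }
        }
        return sb.append('"').toString();
      }

      public static void main(final String[] args) throws IOException {
        final Map<String, String> test = new LinkedHashMap<>();
        test.put("\"myTest\n\b", "myValue\t\\");
        final StringWriter writer = new StringWriter();
        writeProps(test, writer);
        System.out.println(writer); // keys and values come out JSON-escaped
      }
    }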
diff --git a/azkaban-common/src/test/java/azkaban/utils/MemUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/MemUtilsTest.java
index 68e7235..84ea05c 100644
--- a/azkaban-common/src/test/java/azkaban/utils/MemUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/MemUtilsTest.java
@@ -4,6 +4,7 @@ import org.junit.Assert;
 import org.junit.Test;
 
 public class MemUtilsTest {
+
   @Test
   public void testConversion() {
     Assert.assertEquals(Utils.parseMemString("1024"), 1L);
@@ -26,7 +27,7 @@ public class MemUtilsTest {
     badFormatHelper("1KB");
     badFormatHelper("1MB");
     badFormatHelper("1GB");
-    
+
     badFormatHelper("1kb");
     badFormatHelper("1mb");
     badFormatHelper("1gb");
@@ -39,12 +40,12 @@ public class MemUtilsTest {
     badFormatHelper("100f");
     badFormatHelper("100abcdc");
   }
-  
-  private void badFormatHelper(String str) {
+
+  private void badFormatHelper(final String str) {
     try {
       Utils.parseMemString(str);
       Assert.fail("should get a runtime exception");
-    } catch (Exception e) {
+    } catch (final Exception e) {
       Assert.assertEquals(e instanceof NumberFormatException, true);
     }
   }
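The assertions above only pin down a few points of Utils.parseMemString (a bare number scales down by 1024, two-letter suffixes such as "KB" are rejected). Below is a hedged sketch of a parser consistent with those cases; the exact unit semantics are an assumption, not the real Utils behaviour:

    public class MemStringSketch {

      /** Parses strings like "1024", "512k", "2m" or "1g" into kilobytes (illustrative only). */
      static long parseMemToKb(final String str) {
        final String s = str.trim();
        final char unit = Character.toLowerCase(s.charAt(s.length() - 1));
        if (Character.isDigit(unit)) {
          return Long.parseLong(s) / 1024; // assumption: a bare value is treated as bytes
        }
        final long value = Long.parseLong(s.substring(0, s.length() - 1)); // "1KB" fails here
        switch (unit) {
          case 'k': return value;
          case 'm': return value * 1024;
          case 'g': return value * 1024 * 1024;
          default:  throw new NumberFormatException("Unrecognized memory unit: " + str);
        }
      }

      public static void main(final String[] args) {
        System.out.println(parseMemToKb("1024")); // 1
        System.out.println(parseMemToKb("2g"));   // 2097152
      }
    }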
diff --git a/azkaban-common/src/test/java/azkaban/utils/OsMemoryUtilTest.java b/azkaban-common/src/test/java/azkaban/utils/OsMemoryUtilTest.java
index 454f05a..bfe1b17 100644
--- a/azkaban-common/src/test/java/azkaban/utils/OsMemoryUtilTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/OsMemoryUtilTest.java
@@ -1,5 +1,8 @@
 package azkaban.utils;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -8,16 +11,15 @@ import java.util.Collections;
 import java.util.List;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-
 
 public class OsMemoryUtilTest {
-  private OsMemoryUtil util = new OsMemoryUtil();
+
+  private final OsMemoryUtil util = new OsMemoryUtil();
 
   @Test
   public void canReadMemInfoFileIfExists() {
-    long size = util.getOsTotalFreeMemorySize();
-    Path memFile = Paths.get("/proc/meminfo");
+    final long size = this.util.getOsTotalFreeMemorySize();
+    final Path memFile = Paths.get("/proc/meminfo");
     if (!(Files.isRegularFile(memFile) && Files.isReadable(memFile))) {
       assertTrue(size == 0);
     }
@@ -27,41 +29,42 @@ public class OsMemoryUtilTest {
 
   @Test
   public void getOsTotalFreeMemorySize() {
-    List<String> lines =
-        Arrays.asList("MemFree:        1 kB", "Buffers:          2 kB", "Cached:          3 kB", "SwapFree:    4 kB",
+    final List<String> lines =
+        Arrays.asList("MemFree:        1 kB", "Buffers:          2 kB", "Cached:          3 kB",
+            "SwapFree:    4 kB",
             "Foo: 10 kB");
 
-    long size = util.getOsTotalFreeMemorySizeFromStrings(lines);
+    final long size = this.util.getOsTotalFreeMemorySizeFromStrings(lines);
     assertEquals(10, size);
   }
 
   @Test
   public void getOsTotalFreeMemorySizeMissingEntry() {
-    List<String> lines = Arrays.asList("MemFree:        1 kB", "Foo: 10 kB");
+    final List<String> lines = Arrays.asList("MemFree:        1 kB", "Foo: 10 kB");
 
-    long size = util.getOsTotalFreeMemorySizeFromStrings(lines);
+    final long size = this.util.getOsTotalFreeMemorySizeFromStrings(lines);
     assertEquals(0, size);
   }
 
   @Test
   public void getOsTotalFreeMemorySizeWrongEntry() {
-    List<String> lines = Collections.singletonList("MemFree:        foo kB");
+    final List<String> lines = Collections.singletonList("MemFree:        foo kB");
 
-    long size = util.getOsTotalFreeMemorySizeFromStrings(lines);
+    final long size = this.util.getOsTotalFreeMemorySizeFromStrings(lines);
     assertEquals(0, size);
   }
 
   @Test
   public void parseMemoryLine() {
-    String line = "MemFree:        500 kB";
-    long size = util.parseMemoryLine(line);
+    final String line = "MemFree:        500 kB";
+    final long size = this.util.parseMemoryLine(line);
     assertEquals(500, size);
   }
 
   @Test
   public void parseIncorrectMemoryLine() {
-    String line = "MemFree:        ab kB";
-    long size = util.parseMemoryLine(line);
+    final String line = "MemFree:        ab kB";
+    final long size = this.util.parseMemoryLine(line);
     assertEquals(0, size);
   }
 }
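These tests describe the OsMemoryUtil contract fairly completely: sum the MemFree, Buffers, Cached and SwapFree lines of /proc/meminfo, and report 0 when an entry is missing or unparsable. A small self-contained sketch of that contract (not the actual class):

    import java.util.Arrays;
    import java.util.List;

    public class MemInfoSketch {

      private static final List<String> FREE_KEYS =
          Arrays.asList("MemFree:", "Buffers:", "Cached:", "SwapFree:");

      /** Sums the kB values of the "free" meminfo entries; returns 0 if any entry is missing or bad. */
      static long totalFreeKb(final List<String> lines) {
        long total = 0;
        int matched = 0;
        for (final String line : lines) {
          final String[] parts = line.split("\\s+");
          if (parts.length == 3 && FREE_KEYS.contains(parts[0])) {
            try {
              total += Long.parseLong(parts[1]);
              matched++;
            } catch (final NumberFormatException e) {
              return 0; // unparsable value: fail safe, report no free memory
            }
          }
        }
        return matched == FREE_KEYS.size() ? total : 0;
      }

      public static void main(final String[] args) {
        System.out.println(totalFreeKb(Arrays.asList("MemFree:        1 kB", "Buffers:          2 kB",
            "Cached:          3 kB", "SwapFree:    4 kB", "Foo: 10 kB"))); // 10
      }
    }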
diff --git a/azkaban-common/src/test/java/azkaban/utils/PatternLayoutEscapedTest.java b/azkaban-common/src/test/java/azkaban/utils/PatternLayoutEscapedTest.java
index 2cd3cf3..e854e21 100644
--- a/azkaban-common/src/test/java/azkaban/utils/PatternLayoutEscapedTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/PatternLayoutEscapedTest.java
@@ -16,72 +16,73 @@
 
 package azkaban.utils;
 
-import org.apache.log4j.PatternLayout;
+import static org.junit.Assert.assertTrue;
+
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
 import org.apache.log4j.spi.LoggingEvent;
-
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.assertTrue;
-
 /**
  * Tests the output of PatternLayoutEscaped.
  * It should append stack traces and escape new lines, quotes, tabs and backslashes.
  * This is necessary when these messages are logged out as JSON objects.
  */
 public class PatternLayoutEscapedTest {
-  private Logger logger = Logger.getLogger(this.getClass());
-  private PatternLayout layout = new PatternLayoutEscaped();
+
+  private final Logger logger = Logger.getLogger(this.getClass());
+  private final PatternLayout layout = new PatternLayoutEscaped();
 
   @Before
   public void beforeTest() {
-    layout.setConversionPattern("%m");
+    this.layout.setConversionPattern("%m");
   }
 
   @Test
   public void testWithException() {
     try {
       throw new Exception("This is an exception");
-    } catch (Exception e) {
-      LoggingEvent event = createEventWithException("There was an exception", e);
+    } catch (final Exception e) {
+      final LoggingEvent event = createEventWithException("There was an exception", e);
       // Stack trace might change if the codebase changes, but this prefix should always remain the same
-      assertTrue(layout.format(event).startsWith("There was an exception\\njava.lang.Exception: This is an exception"));
+      assertTrue(this.layout.format(event)
+          .startsWith("There was an exception\\njava.lang.Exception: This is an exception"));
     }
   }
 
   @Test
   public void testNewLine() {
-    LoggingEvent event = createMessageEvent("This message contains \n new lines");
-    assertTrue(layout.format(event).equals("This message contains \\n new lines"));
+    final LoggingEvent event = createMessageEvent("This message contains \n new lines");
+    assertTrue(this.layout.format(event).equals("This message contains \\n new lines"));
   }
 
   @Test
   public void testQuote() {
-    LoggingEvent event = createMessageEvent("This message contains \" quotes");
-    assertTrue(layout.format(event).equals("This message contains \\\" quotes"));
+    final LoggingEvent event = createMessageEvent("This message contains \" quotes");
+    assertTrue(this.layout.format(event).equals("This message contains \\\" quotes"));
   }
 
   @Test
   public void testTab() {
-    LoggingEvent event = createMessageEvent("This message contains a tab \t");
-    assertTrue(layout.format(event).equals("This message contains a tab \\t"));
+    final LoggingEvent event = createMessageEvent("This message contains a tab \t");
+    assertTrue(this.layout.format(event).equals("This message contains a tab \\t"));
   }
 
   @Test
   public void testBackSlash() {
-    LoggingEvent event = createMessageEvent("This message contains a backslash \\");
-    assertTrue(layout.format(event).equals("This message contains a backslash \\\\"));
+    final LoggingEvent event = createMessageEvent("This message contains a backslash \\");
+    assertTrue(this.layout.format(event).equals("This message contains a backslash \\\\"));
   }
 
-  private LoggingEvent createMessageEvent(String message) {
+  private LoggingEvent createMessageEvent(final String message) {
     return createEventWithException(message, null);
   }
 
-  private LoggingEvent createEventWithException(String message, Exception e) {
+  private LoggingEvent createEventWithException(final String message, final Exception e) {
     return new LoggingEvent(this.getClass().getCanonicalName(),
-        logger,
+        this.logger,
         0,
         Level.toLevel("INFO"),
         message,
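The class under test is a log4j PatternLayout subclass that appends the stack trace itself and escapes the characters that would break a JSON string. A plausible sketch of such a layout, assuming only the standard log4j 1.2 API; the real PatternLayoutEscaped may be structured differently:

    import org.apache.log4j.PatternLayout;
    import org.apache.log4j.spi.LoggingEvent;

    /** Illustrative layout that makes each log line safe to embed in a JSON string value. */
    public class JsonSafePatternLayout extends PatternLayout {

      @Override
      public String format(final LoggingEvent event) {
        final StringBuilder sb = new StringBuilder(super.format(event));
        final String[] stackTrace = event.getThrowableStrRep();
        if (stackTrace != null) {
          for (final String frame : stackTrace) {
            sb.append('\n').append(frame);
          }
        }
        // Escape backslashes first so the escapes added below are not escaped twice.
        return sb.toString()
            .replace("\\", "\\\\")
            .replace("\"", "\\\"")
            .replace("\n", "\\n")
            .replace("\t", "\\t");
      }

      @Override
      public boolean ignoresThrowable() {
        return false; // the layout handles the stack trace itself
      }
    }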
diff --git a/azkaban-common/src/test/java/azkaban/utils/PropsUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/PropsUtilsTest.java
index 3201d65..0565f23 100644
--- a/azkaban-common/src/test/java/azkaban/utils/PropsUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/PropsUtilsTest.java
@@ -18,16 +18,16 @@ package azkaban.utils;
 
 import java.io.IOException;
 import java.util.Map;
-
 import org.junit.Assert;
 import org.junit.Test;
 
 public class PropsUtilsTest {
+
   @Test
   public void testGoodResolveProps() throws IOException {
-    Props propsGrandParent = new Props();
-    Props propsParent = new Props(propsGrandParent);
-    Props props = new Props(propsParent);
+    final Props propsGrandParent = new Props();
+    final Props propsParent = new Props(propsGrandParent);
+    final Props props = new Props(propsParent);
 
     // Testing props in general
     props.put("letter", "a");
@@ -53,7 +53,7 @@ public class PropsUtilsTest {
     propsGrandParent.put("res5", "${their}");
     propsParent.put("res6", " t ${your} ${your} ${their} ${res5}");
 
-    Props resolved = PropsUtils.resolveProps(props);
+    final Props resolved = PropsUtils.resolveProps(props);
     Assert.assertEquals("name", resolved.get("res1"));
     Assert.assertEquals("ears a", resolved.get("res2"));
     Assert.assertEquals("eyes ears a", resolved.get("res3"));
@@ -65,20 +65,20 @@ public class PropsUtilsTest {
 
   @Test
   public void testInvalidSyntax() throws Exception {
-    Props propsGrandParent = new Props();
-    Props propsParent = new Props(propsGrandParent);
-    Props props = new Props(propsParent);
+    final Props propsGrandParent = new Props();
+    final Props propsParent = new Props(propsGrandParent);
+    final Props props = new Props(propsParent);
 
     propsParent.put("my", "name");
     props.put("res1", "$(my)");
 
-    Props resolved = PropsUtils.resolveProps(props);
+    final Props resolved = PropsUtils.resolveProps(props);
     Assert.assertEquals("$(my)", resolved.get("res1"));
   }
 
   @Test
   public void testExpressionResolution() throws IOException {
-    Props props =
+    final Props props =
         Props.of("normkey", "normal", "num1", "1", "num2", "2", "num3", "3",
             "variablereplaced", "${num1}", "expression1", "$(1+10)",
             "expression2", "$(1+10)*2", "expression3",
@@ -88,7 +88,7 @@ public class PropsUtilsTest {
             "$(1 + ${normkey})", "expression7", "$(\"${normkey}\" + 1)",
             "expression8", "${expression1}", "expression9", "$((2+3) + 3)");
 
-    Props resolved = PropsUtils.resolveProps(props);
+    final Props resolved = PropsUtils.resolveProps(props);
     Assert.assertEquals("normal", resolved.get("normkey"));
     Assert.assertEquals("1", resolved.get("num1"));
     Assert.assertEquals("2", resolved.get("num2"));
@@ -131,53 +131,52 @@ public class PropsUtilsTest {
   public void testGetFlattenedProps() throws Exception {
 
     // for empty props empty flattened map is expected to be returned.
-    Props grandParentProps = new Props();
+    final Props grandParentProps = new Props();
     Assert.assertTrue(grandParentProps.getFlattened().isEmpty());
 
     // single level
-    grandParentProps.put("test1","value1");
-    grandParentProps.put("test2","value2");
-    Map<String,String> set = grandParentProps.getFlattened();
-    Assert.assertEquals(2,set.size());
+    grandParentProps.put("test1", "value1");
+    grandParentProps.put("test2", "value2");
+    Map<String, String> set = grandParentProps.getFlattened();
+    Assert.assertEquals(2, set.size());
     Assert.assertEquals("value1", set.get("test1"));
     Assert.assertEquals("value2", set.get("test2"));
 
     // multiple levels .
-    Props parentProps = new Props(grandParentProps);
-    parentProps.put("test3","value3");
-    parentProps.put("test4","value4");
+    final Props parentProps = new Props(grandParentProps);
+    parentProps.put("test3", "value3");
+    parentProps.put("test4", "value4");
     set = parentProps.getFlattened();
-    Assert.assertEquals(4,set.size());
+    Assert.assertEquals(4, set.size());
     Assert.assertEquals("value3", set.get("test3"));
     Assert.assertEquals("value1", set.get("test1"));
 
     // multiple levels with same keys  .
-    Props props = new Props(parentProps);
-    props.put("test5","value5");
-    props.put("test1","value1.1");
+    final Props props = new Props(parentProps);
+    props.put("test5", "value5");
+    props.put("test1", "value1.1");
     set = props.getFlattened();
-    Assert.assertEquals(5,set.size());
+    Assert.assertEquals(5, set.size());
     Assert.assertEquals("value5", set.get("test5"));
     Assert.assertEquals("value1.1", set.get("test1"));
 
     // verify when iterating the elements are sorted by the key value.
-    Props props2 = new Props();
-    props2.put("2","2");
-    props2.put("0","0");
-    props2.put("1","1");
+    final Props props2 = new Props();
+    props2.put("2", "2");
+    props2.put("0", "0");
+    props2.put("1", "1");
     set = props2.getFlattened();
-    int index = 0 ;
-    for (Map.Entry<String, String> item : set.entrySet())
-    {
-      Assert.assertEquals(item.getKey(),Integer.toString(index++));
+    int index = 0;
+    for (final Map.Entry<String, String> item : set.entrySet()) {
+      Assert.assertEquals(item.getKey(), Integer.toString(index++));
     }
   }
 
   @Test
   public void testCyclesResolveProps() throws IOException {
-    Props propsGrandParent = new Props();
-    Props propsParent = new Props(propsGrandParent);
-    Props props = new Props(propsParent);
+    final Props propsGrandParent = new Props();
+    final Props propsParent = new Props(propsGrandParent);
+    final Props props = new Props(propsParent);
 
     // Testing props in general
     props.put("a", "${a}");
@@ -210,42 +209,44 @@ public class PropsUtilsTest {
 
   @Test
   public void testGetPropertyDiff() throws IOException {
-    Props oldProps = new Props();
-    Props newProps1 = new Props();
+    final Props oldProps = new Props();
+    final Props newProps1 = new Props();
 
     oldProps.put("a", "a_value1");
     oldProps.put("b", "b_value1");
 
     newProps1.put("b", "b_value2");
 
-    String message1 = PropsUtils.getPropertyDiff(oldProps, newProps1);
-    Assert.assertEquals(message1, "Deleted Properties: [ a, a_value1], \nModified Properties: [ b, b_value1-->b_value2], ");
+    final String message1 = PropsUtils.getPropertyDiff(oldProps, newProps1);
+    Assert.assertEquals(message1,
+        "Deleted Properties: [ a, a_value1], \nModified Properties: [ b, b_value1-->b_value2], ");
 
-    Props newProps2 = new Props();
+    final Props newProps2 = new Props();
 
     newProps2.put("a", "a_value1");
     newProps2.put("b", "b_value1");
     newProps2.put("c", "c_value1");
 
-    String message2 = PropsUtils.getPropertyDiff(oldProps, newProps2);
+    final String message2 = PropsUtils.getPropertyDiff(oldProps, newProps2);
     Assert.assertEquals(message2, "Newly created Properties: [ c, c_value1], \n");
 
-    Props newProps3 = new Props();
+    final Props newProps3 = new Props();
 
     newProps3.put("b", "b_value1");
     newProps3.put("c", "a_value1");
 
-    String message3 = PropsUtils.getPropertyDiff(oldProps, newProps3);
-    Assert.assertEquals(message3, "Newly created Properties: [ c, a_value1], \nDeleted Properties: [ a, a_value1], \n");
+    final String message3 = PropsUtils.getPropertyDiff(oldProps, newProps3);
+    Assert.assertEquals(message3,
+        "Newly created Properties: [ c, a_value1], \nDeleted Properties: [ a, a_value1], \n");
   }
 
-  private void failIfNotException(Props props) {
+  private void failIfNotException(final Props props) {
     try {
       PropsUtils.resolveProps(props);
       Assert.fail();
-    } catch (UndefinedPropertyException e) {
+    } catch (final UndefinedPropertyException e) {
       e.printStackTrace();
-    } catch (IllegalArgumentException e) {
+    } catch (final IllegalArgumentException e) {
       e.printStackTrace();
     }
   }
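PropsUtils.resolveProps, as the tests show, walks the Props hierarchy substituting ${name} references, additionally evaluates $( ... ) expressions, and rejects cycles and undefined properties. The sketch below illustrates only the ${name} substitution loop, with a fixed iteration bound standing in for real cycle detection and unresolved names simply left in place:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class VariableResolutionSketch {

      private static final Pattern VAR = Pattern.compile("\\$\\{([^}]+)\\}");

      /** Repeatedly substitutes ${name} references until no replacements remain. */
      static String resolve(final String value, final Map<String, String> props) {
        String current = value;
        for (int i = 0; i < 10; i++) { // small bound instead of real cycle detection
          final Matcher m = VAR.matcher(current);
          final StringBuffer sb = new StringBuffer();
          boolean replaced = false;
          while (m.find()) {
            final String name = m.group(1);
            if (props.containsKey(name)) {
              m.appendReplacement(sb, Matcher.quoteReplacement(props.get(name)));
              replaced = true;
            } else {
              m.appendReplacement(sb, Matcher.quoteReplacement(m.group()));
            }
          }
          m.appendTail(sb);
          current = sb.toString();
          if (!replaced) {
            break;
          }
        }
        return current;
      }

      public static void main(final String[] args) {
        final Map<String, String> props = new HashMap<>();
        props.put("letter", "a");
        props.put("sound", "ears ${letter}");
        System.out.println(resolve("${sound}", props)); // ears a
      }
    }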
diff --git a/azkaban-common/src/test/java/azkaban/utils/RestfulApiClientTest.java b/azkaban-common/src/test/java/azkaban/utils/RestfulApiClientTest.java
index 8e1d3c6..3c3995f 100644
--- a/azkaban-common/src/test/java/azkaban/utils/RestfulApiClientTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/RestfulApiClientTest.java
@@ -19,7 +19,6 @@ package azkaban.utils;
 import java.io.IOException;
 import java.net.URI;
 import java.util.ArrayList;
-
 import org.apache.http.Header;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
@@ -44,86 +43,29 @@ import org.junit.Test;
  */
 public class RestfulApiClientTest {
 
-  static class MockRestfulApiClient extends RestfulApiClient<String> {
-    private int  status = HttpStatus.SC_OK;
-
-    @Override
-    protected String parseResponse(HttpResponse response) throws IOException {
-      final StatusLine statusLine = response.getStatusLine();
-      if (statusLine.getStatusCode() >= 300) {
-          throw new HttpResponseException(statusLine.getStatusCode(),
-                  statusLine.getReasonPhrase());
-      }
-      final HttpEntity entity = response.getEntity();
-      return entity == null ? null : EntityUtils.toString(entity);
-    }
-
-    public void setReturnStatus(int newStatus){
-      this.status = newStatus;
-    }
-
-    public void resetReturnStatus(){
-      this.status = HttpStatus.SC_OK;
-    }
-
-    @Override
-    protected String sendAndReturn(HttpUriRequest request) throws IOException{
-      HttpResponseFactory factory = new DefaultHttpResponseFactory();
-
-      HttpResponse response = factory.newHttpResponse(
-          new BasicStatusLine(HttpVersion.HTTP_1_1, this.status, null),null);
-
-      StringBuilder sb = new StringBuilder();
-      sb.append(String.format("%s = %s;", "METHOD", request.getMethod()));
-      sb.append(String.format("%s = %s;", "URI", request.getURI()));
-
-      if (request.getAllHeaders().length > 0){
-        sb.append("HEADER_EXISTS");
-      }
-
-      for (Header h : request.getAllHeaders()){
-        sb.append(String.format("%s = %s;", h.getName(), h.getValue()));
-      }
-
-      if (request instanceof HttpEntityEnclosingRequestBase){
-        HttpEntity entity = ((HttpEntityEnclosingRequestBase)request).getEntity();
-        if (entity != null){
-          sb.append("BODY_EXISTS");
-          sb.append(String.format("%s = %s;", "BODY", EntityUtils.toString(entity)));
-        }
-      }
-
-      response.setEntity(new StringEntity(sb.toString()));
-      return parseResponse(response);
-    }
-
-  }
-
   @Test
   public void testHttpGet() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    String result = mockClient.httpGet(uri, null);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpGet(uri, null);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = GET"));
   }
 
   @Test
   public void testHttpGetWithHeaderItems() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    ArrayList<NameValuePair> headerItems = new ArrayList<NameValuePair>();
-    headerItems.add(new BasicNameValuePair("h1","v1"));
-    headerItems.add(new BasicNameValuePair("h2","v2"));
+    final ArrayList<NameValuePair> headerItems = new ArrayList<>();
+    headerItems.add(new BasicNameValuePair("h1", "v1"));
+    headerItems.add(new BasicNameValuePair("h2", "v2"));
 
-    String result = mockClient.httpGet(uri, headerItems);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpGet(uri, headerItems);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = GET"));
     Assert.assertTrue(result.contains("h1 = v1"));
     Assert.assertTrue(result.contains("h2 = v2"));
@@ -131,19 +73,18 @@ public class RestfulApiClientTest {
 
   @Test
   public void testHttpPost() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    ArrayList<NameValuePair> headerItems = new ArrayList<NameValuePair>();
-    headerItems.add(new BasicNameValuePair("h1","v1"));
-    headerItems.add(new BasicNameValuePair("h2","v2"));
+    final ArrayList<NameValuePair> headerItems = new ArrayList<>();
+    headerItems.add(new BasicNameValuePair("h1", "v1"));
+    headerItems.add(new BasicNameValuePair("h2", "v2"));
 
-    String content = "123456789";
+    final String content = "123456789";
 
-    String result = mockClient.httpPost(uri, headerItems,content);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpPost(uri, headerItems, content);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = POST"));
     Assert.assertTrue(result.contains("h1 = v1"));
     Assert.assertTrue(result.contains("h2 = v2"));
@@ -152,13 +93,12 @@ public class RestfulApiClientTest {
 
   @Test
   public void testHttpPostWOBody() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    String result = mockClient.httpPost(uri, null,null);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpPost(uri, null, null);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = POST"));
     Assert.assertFalse(result.contains("BODY_EXISTS"));
     Assert.assertFalse(result.contains("HEADER_EXISTS"));
@@ -166,19 +106,18 @@ public class RestfulApiClientTest {
 
   @Test
   public void testHttpPut() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    ArrayList<NameValuePair> headerItems = new ArrayList<NameValuePair>();
-    headerItems.add(new BasicNameValuePair("h1","v1"));
-    headerItems.add(new BasicNameValuePair("h2","v2"));
+    final ArrayList<NameValuePair> headerItems = new ArrayList<>();
+    headerItems.add(new BasicNameValuePair("h1", "v1"));
+    headerItems.add(new BasicNameValuePair("h2", "v2"));
 
-    String content = "123456789";
+    final String content = "123456789";
 
-    String result = mockClient.httpPut(uri, headerItems,content);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpPut(uri, headerItems, content);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = PUT"));
     Assert.assertTrue(result.contains("h1 = v1"));
     Assert.assertTrue(result.contains("h2 = v2"));
@@ -187,51 +126,104 @@ public class RestfulApiClientTest {
 
   @Test
   public void testContentLength() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    String content = "123456789";
+    final String content = "123456789";
 
-    String result = mockClient.httpPut(uri, null,content);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpPut(uri, null, content);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("Content-Length = " + Integer.toString(content.length())));
   }
 
   @Test
   public void testContentLengthOverride() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    ArrayList<NameValuePair> headerItems = new ArrayList<NameValuePair>();
-    headerItems.add(new BasicNameValuePair("Content-Length","0"));
+    final ArrayList<NameValuePair> headerItems = new ArrayList<>();
+    headerItems.add(new BasicNameValuePair("Content-Length", "0"));
 
-    String content = "123456789";
+    final String content = "123456789";
 
-    String result = mockClient.httpPut(uri, headerItems,content);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
-    Assert.assertEquals(result.lastIndexOf("Content-Length"),result.indexOf("Content-Length"));
+    final String result = mockClient.httpPut(uri, headerItems, content);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
+    Assert.assertEquals(result.lastIndexOf("Content-Length"), result.indexOf("Content-Length"));
     Assert.assertTrue(result.contains("Content-Length = " + Integer.toString(content.length())));
   }
 
   @Test
   public void testHttpDelete() throws Exception {
-    MockRestfulApiClient mockClient = new MockRestfulApiClient();
-    @SuppressWarnings("unchecked")
-    URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
-        new Pair <String,String>("Entry1","Value1"));
+    final MockRestfulApiClient mockClient = new MockRestfulApiClient();
+    final URI uri = MockRestfulApiClient.buildUri("test.com", 80, "test", true,
+        new Pair<>("Entry1", "Value1"));
 
-    ArrayList<NameValuePair> headerItems = new ArrayList<NameValuePair>();
-    headerItems.add(new BasicNameValuePair("h1","v1"));
-    headerItems.add(new BasicNameValuePair("h2","v2"));
+    final ArrayList<NameValuePair> headerItems = new ArrayList<>();
+    headerItems.add(new BasicNameValuePair("h1", "v1"));
+    headerItems.add(new BasicNameValuePair("h2", "v2"));
 
-    String result = mockClient.httpDelete(uri, headerItems);
-    Assert.assertTrue(result!= null && result.contains(uri.toString()));
+    final String result = mockClient.httpDelete(uri, headerItems);
+    Assert.assertTrue(result != null && result.contains(uri.toString()));
     Assert.assertTrue(result.contains("METHOD = DELETE"));
     Assert.assertTrue(result.contains("h1 = v1"));
     Assert.assertTrue(result.contains("h2 = v2"));
   }
+
+  static class MockRestfulApiClient extends RestfulApiClient<String> {
+
+    private int status = HttpStatus.SC_OK;
+
+    @Override
+    protected String parseResponse(final HttpResponse response) throws IOException {
+      final StatusLine statusLine = response.getStatusLine();
+      if (statusLine.getStatusCode() >= 300) {
+        throw new HttpResponseException(statusLine.getStatusCode(),
+            statusLine.getReasonPhrase());
+      }
+      final HttpEntity entity = response.getEntity();
+      return entity == null ? null : EntityUtils.toString(entity);
+    }
+
+    public void setReturnStatus(final int newStatus) {
+      this.status = newStatus;
+    }
+
+    public void resetReturnStatus() {
+      this.status = HttpStatus.SC_OK;
+    }
+
+    @Override
+    protected String sendAndReturn(final HttpUriRequest request) throws IOException {
+      final HttpResponseFactory factory = new DefaultHttpResponseFactory();
+
+      final HttpResponse response = factory.newHttpResponse(
+          new BasicStatusLine(HttpVersion.HTTP_1_1, this.status, null), null);
+
+      final StringBuilder sb = new StringBuilder();
+      sb.append(String.format("%s = %s;", "METHOD", request.getMethod()));
+      sb.append(String.format("%s = %s;", "URI", request.getURI()));
+
+      if (request.getAllHeaders().length > 0) {
+        sb.append("HEADER_EXISTS");
+      }
+
+      for (final Header h : request.getAllHeaders()) {
+        sb.append(String.format("%s = %s;", h.getName(), h.getValue()));
+      }
+
+      if (request instanceof HttpEntityEnclosingRequestBase) {
+        final HttpEntity entity = ((HttpEntityEnclosingRequestBase) request).getEntity();
+        if (entity != null) {
+          sb.append("BODY_EXISTS");
+          sb.append(String.format("%s = %s;", "BODY", EntityUtils.toString(entity)));
+        }
+      }
+
+      response.setEntity(new StringEntity(sb.toString()));
+      return parseResponse(response);
+    }
+
+  }
 }
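MockRestfulApiClient uses an "echo the request" pattern: rather than stubbing canned responses, it serialises the outgoing method, URI, headers and body into the response so each assertion can inspect exactly what was sent. The same pattern reduced to its essentials against a hypothetical sender interface:

    import java.util.Map;
    import java.util.TreeMap;

    public class EchoSenderSketch {

      /** Hypothetical transport interface; the real code overrides RestfulApiClient.sendAndReturn. */
      interface Sender {
        String send(String method, String uri, Map<String, String> headers, String body);
      }

      /** Echo mock: the "response" is just a readable dump of the request. */
      static class EchoSender implements Sender {
        @Override
        public String send(final String method, final String uri,
            final Map<String, String> headers, final String body) {
          final StringBuilder sb = new StringBuilder();
          sb.append("METHOD = ").append(method).append(';');
          sb.append("URI = ").append(uri).append(';');
          headers.forEach((k, v) -> sb.append(k).append(" = ").append(v).append(';'));
          if (body != null) {
            sb.append("BODY = ").append(body).append(';');
          }
          return sb.toString();
        }
      }

      public static void main(final String[] args) {
        final Map<String, String> headers = new TreeMap<>();
        headers.put("h1", "v1");
        final String result = new EchoSender().send("GET", "http://test.com/test", headers, null);
        System.out.println(result); // METHOD = GET;URI = http://test.com/test;h1 = v1;
      }
    }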
diff --git a/azkaban-common/src/test/java/azkaban/utils/StringUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/StringUtilsTest.java
index 71c3d21..2148262 100644
--- a/azkaban-common/src/test/java/azkaban/utils/StringUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/StringUtilsTest.java
@@ -16,16 +16,16 @@ public class StringUtilsTest {
   private static final String fireFoxOnLinux =
       "Mozilla/5.0 (X11; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0";
 
-  private static final String[] browserVariants = { chromeOnMac, fireFoxOnMac,
-      safariOnMac, chromeOnLinux, fireFoxOnLinux };
+  private static final String[] browserVariants = {chromeOnMac, fireFoxOnMac,
+      safariOnMac, chromeOnLinux, fireFoxOnLinux};
 
-  private static final String[] BROWSER_NAMES = { "AppleWebKit", "Gecko",
-      "Chrome" };
+  private static final String[] BROWSER_NAMES = {"AppleWebKit", "Gecko",
+      "Chrome"};
 
   @Test
   public void isBrowser() throws Exception {
 
-    for (String browser : browserVariants) {
+    for (final String browser : browserVariants) {
       Assert.assertTrue(browser, StringUtils.isFromBrowser(browser));
     }
   }
@@ -33,7 +33,7 @@ public class StringUtilsTest {
   @Test
   public void notBrowserWithLowercase() throws Exception {
 
-    for (String browser : browserVariants) {
+    for (final String browser : browserVariants) {
       Assert.assertFalse(browser.toLowerCase(),
           StringUtils.isFromBrowser(browser.toLowerCase()));
     }
@@ -41,7 +41,7 @@ public class StringUtilsTest {
 
   @Test
   public void notBrowser() throws Exception {
-    String testStr = "curl";
+    final String testStr = "curl";
     Assert.assertFalse(testStr, StringUtils.isFromBrowser(testStr));
   }
 
@@ -59,21 +59,21 @@ public class StringUtilsTest {
 
   @Test
   public void startsWithBrowserName() {
-    for (String name : BROWSER_NAMES) {
+    for (final String name : BROWSER_NAMES) {
       Assert.assertTrue(StringUtils.isFromBrowser(name + " is awesome"));
     }
   }
 
   @Test
   public void endsWithBrowserName() {
-    for (String name : BROWSER_NAMES) {
+    for (final String name : BROWSER_NAMES) {
       Assert.assertTrue(StringUtils.isFromBrowser("awesome is" + name));
     }
   }
 
   @Test
   public void containsBrowserName() {
-    for (String name : BROWSER_NAMES) {
+    for (final String name : BROWSER_NAMES) {
       Assert.assertTrue(StringUtils.isFromBrowser("awesome " + name + " is"));
     }
   }
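StringUtils.isFromBrowser appears to do a case-sensitive token match against known browser markers, since the lowercase variants are expected to fail. A guessed-at sketch of that check; the actual token list in StringUtils may differ:

    import java.util.Arrays;
    import java.util.List;

    public class BrowserSniffSketch {

      private static final List<String> BROWSER_TOKENS =
          Arrays.asList("AppleWebKit", "Gecko", "Chrome");

      /** Case-sensitive token match, mirroring what the lowercase test above implies. */
      static boolean looksLikeBrowser(final String userAgent) {
        if (userAgent == null) {
          return false;
        }
        return BROWSER_TOKENS.stream().anyMatch(userAgent::contains);
      }

      public static void main(final String[] args) {
        System.out.println(looksLikeBrowser("Mozilla/5.0 ... Chrome/56.0 Safari/537.36")); // true
        System.out.println(looksLikeBrowser("curl"));                                      // false
      }
    }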
diff --git a/azkaban-common/src/test/java/azkaban/utils/SystemMemoryInfoTest.java b/azkaban-common/src/test/java/azkaban/utils/SystemMemoryInfoTest.java
index 9412c1d..eda1b2e 100644
--- a/azkaban-common/src/test/java/azkaban/utils/SystemMemoryInfoTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/SystemMemoryInfoTest.java
@@ -1,43 +1,46 @@
 package azkaban.utils;
 
-import org.junit.Test;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
+import org.junit.Test;
 
 
 public class SystemMemoryInfoTest {
+
   private static final long GB_UNIT = 1024L * 1024L;
 
   @Test
   public void grantedIfFreeMemoryAvailable() throws Exception {
-    OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
-    long availableFreeMem = 10L * 1024L * 1024L; //10 GB
+    final OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
+    final long availableFreeMem = 10L * 1024L * 1024L; //10 GB
     when(memUtil.getOsTotalFreeMemorySize()).thenReturn(availableFreeMem);
-    SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
-    boolean isGranted = memInfo.canSystemGrantMemory(1);
+    final SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
+    final boolean isGranted = memInfo.canSystemGrantMemory(1);
     assertTrue(isGranted);
   }
 
   @Test
   public void notGrantedIfFreeMemoryAvailableLessThanMinimal() throws Exception {
-    OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
-    long availableFreeMem = 4L * 1024L * 1024L; //4 GB
+    final OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
+    final long availableFreeMem = 4L * 1024L * 1024L; //4 GB
     when(memUtil.getOsTotalFreeMemorySize()).thenReturn(availableFreeMem);
-    SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
-    long xmx = 2 * GB_UNIT; //2 GB
-    boolean isGranted = memInfo.canSystemGrantMemory(xmx);
+    final SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
+    final long xmx = 2 * GB_UNIT; //2 GB
+    final boolean isGranted = memInfo.canSystemGrantMemory(xmx);
     assertFalse(isGranted);
   }
 
   @Test
   public void grantedIfFreeMemoryCheckReturnsZero() throws Exception {
-    OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
-    long availableFreeMem = 0;
+    final OsMemoryUtil memUtil = mock(OsMemoryUtil.class);
+    final long availableFreeMem = 0;
     when(memUtil.getOsTotalFreeMemorySize()).thenReturn(availableFreeMem);
-    SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
-    long xmx = 0;
-    boolean isGranted = memInfo.canSystemGrantMemory(xmx);
+    final SystemMemoryInfo memInfo = new SystemMemoryInfo(memUtil);
+    final long xmx = 0;
+    final boolean isGranted = memInfo.canSystemGrantMemory(xmx);
     assertTrue("Memory check failed. Should fail open", isGranted);
   }
 }
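The three tests outline SystemMemoryInfo.canSystemGrantMemory: grant when enough free memory would remain after the requested Xmx, deny otherwise, and fail open when the free-memory probe returns 0. A sketch of that decision with an assumed 3 GB headroom constant; the real threshold comes from SystemMemoryInfo's own configuration:

    public class MemoryGrantSketch {

      // Assumed headroom the host must keep after granting a request (not the real default).
      private static final long MIN_FREE_KB = 3L * 1024L * 1024L; // 3 GB in kB

      /** Fail-open check: grant when the probe is unavailable (0) or enough memory remains. */
      static boolean canGrant(final long freeMemoryKb, final long requestedXmxKb) {
        if (freeMemoryKb <= 0) {
          return true; // could not read /proc/meminfo; do not block executions
        }
        return freeMemoryKb - requestedXmxKb >= MIN_FREE_KB;
      }

      public static void main(final String[] args) {
        System.out.println(canGrant(10L * 1024 * 1024, 1));                // true
        System.out.println(canGrant(4L * 1024 * 1024, 2L * 1024 * 1024));  // false
        System.out.println(canGrant(0, 0));                                // true (fail open)
      }
    }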
diff --git a/azkaban-common/src/test/java/azkaban/utils/TestUtils.java b/azkaban-common/src/test/java/azkaban/utils/TestUtils.java
index 3eff490..1f1cb46 100644
--- a/azkaban-common/src/test/java/azkaban/utils/TestUtils.java
+++ b/azkaban-common/src/test/java/azkaban/utils/TestUtils.java
@@ -16,21 +16,21 @@
 
 package azkaban.utils;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.flow.Flow;
 import azkaban.project.Project;
 import azkaban.user.User;
 import azkaban.user.UserManager;
 import azkaban.user.XmlUserManager;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
 
 /**
  * Commonly used utility methods for unit/integration tests
  */
 public class TestUtils {
+
   /* Base resource directory for unit tests */
   private static final String UNIT_RESOURCE_DIR =
       "../azkaban-test/src/test/resources/azkaban/test";
@@ -38,9 +38,9 @@ public class TestUtils {
   private static final String UNIT_EXECUTION_DIR =
       UNIT_RESOURCE_DIR + "/executions";
 
-  public static File getFlowDir(String projectName, String flow) {
+  public static File getFlowDir(final String projectName, final String flow) {
     return new File(String.format("%s/%s/%s.flow", UNIT_EXECUTION_DIR, projectName,
-      flow));
+        flow));
   }
 
   public static User getTestUser() {
@@ -48,29 +48,28 @@ public class TestUtils {
   }
 
   /* Helper method to create an ExecutableFlow from serialized description */
-  public static ExecutableFlow createExecutableFlow(String projectName,
-    String flowName) throws IOException {
-    File jsonFlowFile = getFlowDir(projectName, flowName);
-    @SuppressWarnings("unchecked")
-    HashMap<String, Object> flowObj =
-      (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
+  public static ExecutableFlow createExecutableFlow(final String projectName,
+      final String flowName) throws IOException {
+    final File jsonFlowFile = getFlowDir(projectName, flowName);
+    final HashMap<String, Object> flowObj =
+        (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
 
-    Flow flow = Flow.flowFromObject(flowObj);
-    Project project = new Project(1, "flow");
-    HashMap<String, Flow> flowMap = new HashMap<String, Flow>();
+    final Flow flow = Flow.flowFromObject(flowObj);
+    final Project project = new Project(1, "flow");
+    final HashMap<String, Flow> flowMap = new HashMap<>();
     flowMap.put(flow.getId(), flow);
     project.setFlows(flowMap);
-    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+    final ExecutableFlow execFlow = new ExecutableFlow(project, flow);
 
     return execFlow;
   }
 
   /* Helper method to create an XmlUserManager from XML_FILE_PARAM file */
   public static UserManager createTestXmlUserManager() {
-    Props props = new Props();
+    final Props props = new Props();
     props.put(XmlUserManager.XML_FILE_PARAM, UNIT_RESOURCE_DIR
-      + "/azkaban-users.xml");
-    UserManager manager = new XmlUserManager(props);
+        + "/azkaban-users.xml");
+    final UserManager manager = new XmlUserManager(props);
     return manager;
   }
 }
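A minimal smoke test built on these helpers might look like the following; the project and flow names are placeholders and must match a serialized flow under the unit execution directory:

    import azkaban.executor.ExecutableFlow;
    import org.junit.Assert;
    import org.junit.Test;

    public class TestUtilsUsageExample {

      @Test
      public void buildsFlowFromSerializedJson() throws Exception {
        // "exectest1" / "exec1" are hypothetical names used only for illustration.
        final ExecutableFlow flow = TestUtils.createExecutableFlow("exectest1", "exec1");
        Assert.assertNotNull(flow);
      }
    }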
diff --git a/azkaban-common/src/test/java/azkaban/utils/UtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/UtilsTest.java
index 526cc36..d767dc8 100644
--- a/azkaban-common/src/test/java/azkaban/utils/UtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/UtilsTest.java
@@ -58,7 +58,7 @@ public class UtilsTest {
   @Test
   public void testValidCronExpressionV() {
 
-    DateTimeZone timezone = DateTimeZone.getDefault();
+    final DateTimeZone timezone = DateTimeZone.getDefault();
     Assert.assertTrue(Utils.isCronExpressionValid("0 0 3 ? * *", timezone));
     Assert.assertTrue(Utils.isCronExpressionValid("0 0 3 ? * * 2017", timezone));
     Assert.assertTrue(Utils.isCronExpressionValid("0 0 * ? * *", timezone));
@@ -72,7 +72,7 @@ public class UtilsTest {
   @Test
   public void testInvalidCronExpression() {
 
-    DateTimeZone timezone = DateTimeZone.getDefault();
+    final DateTimeZone timezone = DateTimeZone.getDefault();
     Assert.assertFalse(Utils.isCronExpressionValid("0 0 3 * * *", timezone));
     Assert.assertFalse(Utils.isCronExpressionValid("0 66 * ? * *", timezone));
     Assert.assertFalse(Utils.isCronExpressionValid("0 * * ? * 8", timezone));
diff --git a/azkaban-common/src/test/java/azkaban/utils/WebUtilsTest.java b/azkaban-common/src/test/java/azkaban/utils/WebUtilsTest.java
index 39cb66e..be00ff0 100644
--- a/azkaban-common/src/test/java/azkaban/utils/WebUtilsTest.java
+++ b/azkaban-common/src/test/java/azkaban/utils/WebUtilsTest.java
@@ -16,14 +16,11 @@
 
 package azkaban.utils;
 
-import org.junit.Assert;
-import org.junit.Test;
+import static org.junit.Assert.assertEquals;
 
 import java.util.HashMap;
 import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
+import org.junit.Test;
 
 /**
  * Test class for azkaban.utils.WebUtils
@@ -31,59 +28,59 @@ import static org.junit.Assert.assertNotNull;
 public class WebUtilsTest {
 
   @Test
-  public void testWhenNoXForwardedForHeaderUseClientIp(){
+  public void testWhenNoXForwardedForHeaderUseClientIp() {
 
-    String clientIp = "127.0.0.1:10000";
-    Map<String, String> headers = new HashMap<>();
+    final String clientIp = "127.0.0.1:10000";
+    final Map<String, String> headers = new HashMap<>();
 
-    WebUtils utils = new WebUtils();
+    final WebUtils utils = new WebUtils();
 
-    String ip = utils.getRealClientIpAddr(headers, clientIp);
+    final String ip = utils.getRealClientIpAddr(headers, clientIp);
 
     assertEquals(ip, "127.0.0.1");
   }
 
   @Test
-  public void testWhenClientIpNoPort(){
+  public void testWhenClientIpNoPort() {
 
-    String clientIp = "192.168.1.1";
-    Map<String, String> headers = new HashMap<>();
+    final String clientIp = "192.168.1.1";
+    final Map<String, String> headers = new HashMap<>();
 
-    WebUtils utils = new WebUtils();
+    final WebUtils utils = new WebUtils();
 
-    String ip = utils.getRealClientIpAddr(headers, clientIp);
+    final String ip = utils.getRealClientIpAddr(headers, clientIp);
 
     assertEquals(ip, "192.168.1.1");
   }
 
   @Test
-  public void testWhenXForwardedForHeaderUseHeader(){
+  public void testWhenXForwardedForHeaderUseHeader() {
 
-    String clientIp = "127.0.0.1:10000";
-    String upstreamIp = "192.168.1.1:10000";
-    Map<String, String> headers = new HashMap<>();
+    final String clientIp = "127.0.0.1:10000";
+    final String upstreamIp = "192.168.1.1:10000";
+    final Map<String, String> headers = new HashMap<>();
 
     headers.put("X-Forwarded-For", upstreamIp);
 
-    WebUtils utils = new WebUtils();
+    final WebUtils utils = new WebUtils();
 
-    String ip = utils.getRealClientIpAddr(headers, clientIp);
+    final String ip = utils.getRealClientIpAddr(headers, clientIp);
 
     assertEquals(ip, "192.168.1.1");
   }
 
   @Test
-  public void testWhenXForwardedForHeaderMultipleUpstreamsUseHeader(){
+  public void testWhenXForwardedForHeaderMultipleUpstreamsUseHeader() {
 
-    String clientIp = "127.0.0.1:10000";
-    String upstreamIp = "192.168.1.1:10000";
-    Map<String, String> headers = new HashMap<>();
+    final String clientIp = "127.0.0.1:10000";
+    final String upstreamIp = "192.168.1.1:10000";
+    final Map<String, String> headers = new HashMap<>();
 
     headers.put("X-Forwarded-For", upstreamIp + ",127.0.0.1,55.55.55.55");
 
-    WebUtils utils = new WebUtils();
+    final WebUtils utils = new WebUtils();
 
-    String ip = utils.getRealClientIpAddr(headers, clientIp);
+    final String ip = utils.getRealClientIpAddr(headers, clientIp);
 
     assertEquals(ip, "192.168.1.1");
   }
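The behaviour pinned down here is: prefer the left-most X-Forwarded-For entry when the header is present, otherwise use the socket address, and strip any :port suffix. A compact sketch of that logic (illustrative, not the WebUtils source):

    import java.util.HashMap;
    import java.util.Map;

    public class ClientIpSketch {

      /** Prefer the left-most X-Forwarded-For entry, fall back to the socket address, drop the port. */
      static String realClientIp(final Map<String, String> headers, final String remoteAddr) {
        String ip = headers.containsKey("X-Forwarded-For")
            ? headers.get("X-Forwarded-For") : remoteAddr;
        ip = ip.split(",")[0].trim();
        final int colon = ip.indexOf(':');
        return colon >= 0 ? ip.substring(0, colon) : ip;
      }

      public static void main(final String[] args) {
        final Map<String, String> headers = new HashMap<>();
        System.out.println(realClientIp(headers, "127.0.0.1:10000"));   // 127.0.0.1
        headers.put("X-Forwarded-For", "192.168.1.1:10000,127.0.0.1");
        System.out.println(realClientIp(headers, "127.0.0.1:10000"));   // 192.168.1.1
      }
    }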
diff --git a/azkaban-common/src/test/resources/azkaban/executor/mail/errorEmail.html b/azkaban-common/src/test/resources/azkaban/executor/mail/errorEmail.html
index 610f883..8593525 100644
--- a/azkaban-common/src/test/resources/azkaban/executor/mail/errorEmail.html
+++ b/azkaban-common/src/test/resources/azkaban/executor/mail/errorEmail.html
@@ -1 +1,25 @@
-<h2 style="color:#FF0000"> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has failed on unit-tests</h2><table><tr><td>Start Time</td><td>2016/07/17 11:54:11 EEST</td></tr><tr><td>End Time</td><td>2016/07/17 11:54:16 EEST</td></tr><tr><td>Duration</td><td>5 sec</td></tr><tr><td>Status</td><td>FAILED</td></tr></table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a><h3>Reason</h3><ul><li><a href="http://localhost:8081/executor?execid=-1&job=test-job">Failed job 'test-job' Link</a></li></ul>
+<h2 style="color:#FF0000"> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has
+  failed on unit-tests</h2>
+<table>
+  <tr>
+    <td>Start Time</td>
+    <td>2016/07/17 11:54:11 EEST</td>
+  </tr>
+  <tr>
+    <td>End Time</td>
+    <td>2016/07/17 11:54:16 EEST</td>
+  </tr>
+  <tr>
+    <td>Duration</td>
+    <td>5 sec</td>
+  </tr>
+  <tr>
+    <td>Status</td>
+    <td>FAILED</td>
+  </tr>
+</table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a><h3>
+  Reason</h3>
+<ul>
+  <li><a href="http://localhost:8081/executor?execid=-1&job=test-job">Failed job 'test-job' Link</a>
+  </li>
+</ul>
diff --git a/azkaban-common/src/test/resources/azkaban/executor/mail/firstErrorMessage.html b/azkaban-common/src/test/resources/azkaban/executor/mail/firstErrorMessage.html
index b47556a..201e614 100644
--- a/azkaban-common/src/test/resources/azkaban/executor/mail/firstErrorMessage.html
+++ b/azkaban-common/src/test/resources/azkaban/executor/mail/firstErrorMessage.html
@@ -1 +1,26 @@
-<h2 style="color:#FF0000"> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has encountered a failure on unit-tests</h2>This flow is set to complete all currently running jobs before stopping.<table><tr><td>Start Time</td><td>2016/07/17 11:54:11 EEST</td></tr><tr><td>End Time</td><td>N/A</td></tr><tr><td>Duration</td><td>10 sec</td></tr><tr><td>Status</td><td>FAILED_FINISHING</td></tr></table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a><h3>Reason</h3><ul><li><a href="http://localhost:8081/executor?execid=-1&job=test-job">Failed job 'test-job' Link</a></li></ul>
+<h2 style="color:#FF0000"> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has
+  encountered a failure on
+  unit-tests</h2>This flow is set to complete all currently running jobs before stopping.
+<table>
+  <tr>
+    <td>Start Time</td>
+    <td>2016/07/17 11:54:11 EEST</td>
+  </tr>
+  <tr>
+    <td>End Time</td>
+    <td>N/A</td>
+  </tr>
+  <tr>
+    <td>Duration</td>
+    <td>10 sec</td>
+  </tr>
+  <tr>
+    <td>Status</td>
+    <td>FAILED_FINISHING</td>
+  </tr>
+</table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a><h3>
+  Reason</h3>
+<ul>
+  <li><a href="http://localhost:8081/executor?execid=-1&job=test-job">Failed job 'test-job' Link</a>
+  </li>
+</ul>
diff --git a/azkaban-common/src/test/resources/azkaban/executor/mail/successEmail.html b/azkaban-common/src/test/resources/azkaban/executor/mail/successEmail.html
index baec8f9..d961c2d 100644
--- a/azkaban-common/src/test/resources/azkaban/executor/mail/successEmail.html
+++ b/azkaban-common/src/test/resources/azkaban/executor/mail/successEmail.html
@@ -1 +1,20 @@
-<h2> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has succeeded on unit-tests</h2><table><tr><td>Start Time</td><td>2016/07/17 11:54:11 EEST</td></tr><tr><td>End Time</td><td>2016/07/17 11:54:16 EEST</td></tr><tr><td>Duration</td><td>5 sec</td></tr><tr><td>Status</td><td>SUCCEEDED</td></tr></table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a>
+<h2> Execution '-1' of flow 'mail-creator-test' of project 'test-project' has succeeded on
+  unit-tests</h2>
+<table>
+  <tr>
+    <td>Start Time</td>
+    <td>2016/07/17 11:54:11 EEST</td>
+  </tr>
+  <tr>
+    <td>End Time</td>
+    <td>2016/07/17 11:54:16 EEST</td>
+  </tr>
+  <tr>
+    <td>Duration</td>
+    <td>5 sec</td>
+  </tr>
+  <tr>
+    <td>Status</td>
+    <td>SUCCEEDED</td>
+  </tr>
+</table><a href="http://localhost:8081/executor?execid=-1">mail-creator-test Execution Link</a>
diff --git a/azkaban-common/src/test/resources/log4j.properties b/azkaban-common/src/test/resources/log4j.properties
index fbc13fd..36c72cb 100644
--- a/azkaban-common/src/test/resources/log4j.properties
+++ b/azkaban-common/src/test/resources/log4j.properties
@@ -1,5 +1,4 @@
 log4j.rootLogger=INFO, Console
-
 log4j.appender.Console=org.apache.log4j.ConsoleAppender
 log4j.appender.Console.layout=org.apache.log4j.PatternLayout
 log4j.appender.Console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] %m%n
diff --git a/azkaban-common/src/test/resources/test-conf/azkaban-users-test1.xml b/azkaban-common/src/test/resources/test-conf/azkaban-users-test1.xml
index 4c44882..ace1f6f 100644
--- a/azkaban-common/src/test/resources/test-conf/azkaban-users-test1.xml
+++ b/azkaban-common/src/test/resources/test-conf/azkaban-users-test1.xml
@@ -1,12 +1,13 @@
 <azkaban-users>
-	<user username="user0" password="password0" roles="role0" groups="group0"/>
-	<user username="user1" password="password1" roles="role0,role1" groups="group1,group2"/>
-	<user username="user2" password="password2" roles="role0,role1,role2" groups="group1,group2,group3"/>
-	<user username="user3" password="password3" roles="role1, role2" groups="group1, group2"/>
-	<user username="user4" password="password4" roles="role1 , role2" groups="group1 , group2"/>
-	<user username="user5" password="password5" roles="role1 , role2," groups="group1 , group2,"/>
-	<user username="user6" password="password6" roles="role3 , role2, " groups="group1 , group2, "/>
-	<user username="user7" password="password7" groups="group1"/>
-	<user username="user8" password="password8" roles="role3"/>
-	<user username="user9" password="password9"/>
+  <user groups="group0" password="password0" roles="role0" username="user0"/>
+  <user groups="group1,group2" password="password1" roles="role0,role1" username="user1"/>
+  <user groups="group1,group2,group3" password="password2" roles="role0,role1,role2"
+    username="user2"/>
+  <user groups="group1, group2" password="password3" roles="role1, role2" username="user3"/>
+  <user groups="group1 , group2" password="password4" roles="role1 , role2" username="user4"/>
+  <user groups="group1 , group2," password="password5" roles="role1 , role2," username="user5"/>
+  <user groups="group1 , group2, " password="password6" roles="role3 , role2, " username="user6"/>
+  <user groups="group1" password="password7" username="user7"/>
+  <user password="password8" roles="role3" username="user8"/>
+  <user password="password9" username="user9"/>
 </azkaban-users>
\ No newline at end of file
diff --git a/azkaban-common/src/test/resources/test-conf/azkaban-validators-test1.xml b/azkaban-common/src/test/resources/test-conf/azkaban-validators-test1.xml
index 2d0d772..95748be 100644
--- a/azkaban-common/src/test/resources/test-conf/azkaban-validators-test1.xml
+++ b/azkaban-common/src/test/resources/test-conf/azkaban-validators-test1.xml
@@ -1,3 +1,3 @@
 <azkaban-validators>
-  <validator classname="do.not.exist.validator" />
+  <validator classname="do.not.exist.validator"/>
 </azkaban-validators>
\ No newline at end of file
diff --git a/azkaban-common/src/test/resources/test-conf/azkaban-validators-test2.xml b/azkaban-common/src/test/resources/test-conf/azkaban-validators-test2.xml
index 681deee..66151ec 100644
--- a/azkaban-common/src/test/resources/test-conf/azkaban-validators-test2.xml
+++ b/azkaban-common/src/test/resources/test-conf/azkaban-validators-test2.xml
@@ -1,5 +1,5 @@
 <azkaban-validators>
   <validator classname="azkaban.project.validator.TestValidator">
-    <property key="key" value="value" />
+    <property key="key" value="value"/>
   </validator>
 </azkaban-validators>
\ No newline at end of file
diff --git a/azkaban-db/build.gradle b/azkaban-db/build.gradle
index 3578877..ab09bd0 100644
--- a/azkaban-db/build.gradle
+++ b/azkaban-db/build.gradle
@@ -18,33 +18,32 @@ apply plugin: 'distribution'
 
 dependencies {
 
-  // todo kunkun-tang: consolidate dependencies in azkaban-common and azkaban-db
-  compile('log4j:log4j:1.2.16')
-  compile('com.google.inject:guice:4.1.0')
+    // todo kunkun-tang: consolidate dependencies in azkaban-common and azkaban-db
+    compile('log4j:log4j:1.2.16')
+    compile('com.google.inject:guice:4.1.0')
 
-  compile('commons-dbutils:commons-dbutils:1.5')
-  compile('org.apache.commons:commons-dbcp2:2.1.1')
+    compile('commons-dbutils:commons-dbutils:1.5')
+    compile('org.apache.commons:commons-dbcp2:2.1.1')
 
-  testCompile('org.mockito:mockito-all:1.10.19')
-  testRuntime('com.h2database:h2:1.4.193')
+    testCompile('org.mockito:mockito-all:1.10.19')
+    testRuntime('com.h2database:h2:1.4.193')
 }
 
-
 /**
  * concat task
  * Concatenate all the sql queries / files (except update.*.sql) and collect it into a single
  * SQL file.
  */
 task concat() {
-  doLast {
-    ext.destFile = 'build/sql/create-all-sql-' + version + '.sql';
-    ant.concat(destfile: destFile, fixlastline: 'yes') {
-      logger.info('Concating create scripts to ' + destFile)
-      fileset(dir: 'src/main/sql') {
-        exclude(name: 'database.properties')
-      }
+    doLast {
+        ext.destFile = 'build/sql/create-all-sql-' + version + '.sql';
+        ant.concat(destfile: destFile, fixlastline: 'yes') {
+            logger.info('Concating create scripts to ' + destFile)
+            fileset(dir: 'src/main/sql') {
+                exclude(name: 'database.properties')
+            }
+        }
     }
-  }
 }
 
 installDist.dependsOn concat
@@ -52,10 +51,10 @@ distTar.dependsOn concat
 distZip.dependsOn concat
 
 distributions {
-  main {
-    contents {
-      from('src/main/sql')
-      from("$buildDir/sql")
+    main {
+        contents {
+            from('src/main/sql')
+            from("$buildDir/sql")
+        }
     }
-  }
 }
diff --git a/azkaban-db/src/main/java/azkaban/db/AzDBUtil.java b/azkaban-db/src/main/java/azkaban/db/AzDBUtil.java
index f9e19fa..51f963e 100644
--- a/azkaban-db/src/main/java/azkaban/db/AzDBUtil.java
+++ b/azkaban-db/src/main/java/azkaban/db/AzDBUtil.java
@@ -17,5 +17,6 @@
 package azkaban.db;
 
 public class AzDBUtil {
+
   static final int MAX_DB_RETRY_COUNT = 5;
 }
diff --git a/azkaban-db/src/main/java/azkaban/db/AzkabanDataSource.java b/azkaban-db/src/main/java/azkaban/db/AzkabanDataSource.java
index 7f7ad42..9179cd2 100644
--- a/azkaban-db/src/main/java/azkaban/db/AzkabanDataSource.java
+++ b/azkaban-db/src/main/java/azkaban/db/AzkabanDataSource.java
@@ -18,5 +18,6 @@ package azkaban.db;
 import org.apache.commons.dbcp2.BasicDataSource;
 
 public abstract class AzkabanDataSource extends BasicDataSource {
+
   public abstract String getDBType();
 }
diff --git a/azkaban-db/src/main/java/azkaban/db/DatabaseOperator.java b/azkaban-db/src/main/java/azkaban/db/DatabaseOperator.java
index 3cc5b07..5af5532 100644
--- a/azkaban-db/src/main/java/azkaban/db/DatabaseOperator.java
+++ b/azkaban-db/src/main/java/azkaban/db/DatabaseOperator.java
@@ -36,9 +36,9 @@ public interface DatabaseOperator {
    * @param params Initialize the PreparedStatement's IN parameters
   * @param <T> The type of object that the query handler returns
    * @return The object returned by the handler.
-   * @throws SQLException
    */
-  <T> T query(String sqlQuery, ResultSetHandler<T> resultHandler, Object...params) throws SQLException;
+  <T> T query(String sqlQuery, ResultSetHandler<T> resultHandler, Object... params)
+      throws SQLException;
 
   /**
    * Provide a way to allow users define custom SQL operations without relying on fixed
@@ -48,7 +48,6 @@ public interface DatabaseOperator {
    * @param operations A sequence of DB operations
    * @param <T> The type of object that the operations returns. Note that T could be null
    * @return T The object returned by the SQL statement, expected by the caller
-   * @throws SQLException
    */
   <T> T transaction(SQLTransaction<T> operations) throws SQLException;
 
@@ -58,7 +57,6 @@ public interface DatabaseOperator {
    * @param updateClause sql statements to execute
    * @param params Initialize the PreparedStatement's IN parameters
    * @return The number of rows updated.
-   * @throws SQLException
    */
-  int update(String updateClause, Object...params) throws SQLException;
+  int update(String updateClause, Object... params) throws SQLException;
 }
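
As orientation for the interface above: the handler maps the ResultSet to a value and the trailing varargs fill the PreparedStatement's IN parameters. A minimal caller sketch (not part of this commit; the projects table and its columns are made up) could look like this:

    package azkaban.db;

    import java.sql.SQLException;
    import org.apache.commons.dbutils.ResultSetHandler;

    // Hypothetical caller of DatabaseOperator. The handler turns the first row/column into an
    // int; the varargs after it bind the statement's IN parameters in order.
    public class ProjectCountQuery {

      private final DatabaseOperator dbOperator;

      public ProjectCountQuery(final DatabaseOperator dbOperator) {
        this.dbOperator = dbOperator;
      }

      public int countActiveProjects() throws SQLException {
        final ResultSetHandler<Integer> countHandler = rs -> rs.next() ? rs.getInt(1) : 0;
        return this.dbOperator.query(
            "SELECT COUNT(*) FROM projects WHERE active = ?", countHandler, true);
      }

      public int deactivateProject(final int projectId) throws SQLException {
        // update() returns the number of rows affected.
        return this.dbOperator.update(
            "UPDATE projects SET active = ? WHERE id = ?", false, projectId);
      }
    }
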
diff --git a/azkaban-db/src/main/java/azkaban/db/DatabaseOperatorImpl.java b/azkaban-db/src/main/java/azkaban/db/DatabaseOperatorImpl.java
index 388c38f..68abc9f 100644
--- a/azkaban-db/src/main/java/azkaban/db/DatabaseOperatorImpl.java
+++ b/azkaban-db/src/main/java/azkaban/db/DatabaseOperatorImpl.java
@@ -15,16 +15,15 @@
  */
 package azkaban.db;
 
-import java.sql.Connection;
-import org.apache.commons.dbutils.DbUtils;
-import org.apache.log4j.Logger;
+import static java.util.Objects.*;
 
+import com.google.inject.Inject;
+import java.sql.Connection;
 import java.sql.SQLException;
+import org.apache.commons.dbutils.DbUtils;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
-import com.google.inject.Inject;
-
-import static java.util.Objects.*;
+import org.apache.log4j.Logger;
 
 /**
  * Implement AZ DB related operations. This class is thread safe.
@@ -37,23 +36,24 @@ public class DatabaseOperatorImpl implements DatabaseOperator {
 
   /**
    * Note: this queryRunner should include a concrete {@link AzkabanDataSource} inside.
-   *
-   * @param queryRunner
    */
   @Inject
-  public DatabaseOperatorImpl(QueryRunner queryRunner){
+  public DatabaseOperatorImpl(final QueryRunner queryRunner) {
     requireNonNull(queryRunner.getDataSource(), "data source must not be null.");
     this.queryRunner = queryRunner;
   }
 
   /**
-   * query method Implementation. it will call {@link AzkabanDataSource#getConnection()} inside queryrunner.query.
+   * Query method implementation. It will call {@link AzkabanDataSource#getConnection()} inside
+   * queryRunner.query.
    */
   @Override
-  public <T> T query(String baseQuery, ResultSetHandler<T> resultHandler, Object...params) throws SQLException {
-    try{
-      return queryRunner.query(baseQuery, resultHandler, params);
-    } catch (SQLException ex){
+  public <T> T query(final String baseQuery, final ResultSetHandler<T> resultHandler,
+      final Object... params)
+      throws SQLException {
+    try {
+      return this.queryRunner.query(baseQuery, resultHandler, params);
+    } catch (final SQLException ex) {
       // todo kunkun-tang: Retry logics should be implemented here.
       logger.error("query failed", ex);
       throw ex;
@@ -62,19 +62,19 @@ public class DatabaseOperatorImpl implements DatabaseOperator {
 
   /**
    * transaction method Implementation.
-   *
    */
   @Override
-  public <T> T transaction(SQLTransaction<T> operations) throws SQLException {
+  public <T> T transaction(final SQLTransaction<T> operations) throws SQLException {
     Connection conn = null;
-    try{
-      conn = queryRunner.getDataSource().getConnection();
+    try {
+      conn = this.queryRunner.getDataSource().getConnection();
       conn.setAutoCommit(false);
-      DatabaseTransOperator transOperator = new DatabaseTransOperatorImpl(queryRunner, conn);
-      T res = operations.execute(transOperator);
+      final DatabaseTransOperator transOperator = new DatabaseTransOperatorImpl(this.queryRunner,
+          conn);
+      final T res = operations.execute(transOperator);
       conn.commit();
       return res;
-    } catch (SQLException ex) {
+    } catch (final SQLException ex) {
       // todo kunkun-tang: Retry logics should be implemented here.
       logger.error("transaction failed", ex);
       throw ex;
@@ -84,18 +84,18 @@ public class DatabaseOperatorImpl implements DatabaseOperator {
   }
 
   /**
-   * update implementation. it will call {@link AzkabanDataSource#getConnection()} inside queryrunner.update.
+   * Update implementation. It will call {@link AzkabanDataSource#getConnection()} inside
+   * queryRunner.update.
    *
    * @param updateClause sql statements to execute
    * @param params Initialize the PreparedStatement's IN parameters
    * @return the number of rows being affected by update
-   * @throws SQLException
    */
   @Override
-  public int update(String updateClause, Object...params) throws SQLException {
-    try{
-      return queryRunner.update(updateClause, params);
-    } catch (SQLException ex){
+  public int update(final String updateClause, final Object... params) throws SQLException {
+    try {
+      return this.queryRunner.update(updateClause, params);
+    } catch (final SQLException ex) {
       // todo kunkun-tang: Retry logics should be implemented here.
       logger.error("update failed", ex);
       throw ex;
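
The catch blocks above all carry a "Retry logics should be implemented here" TODO. Purely as a sketch of one possible shape for that logic (not part of this commit), a bounded-retry wrapper could reuse the MAX_DB_RETRY_COUNT constant from AzDBUtil:

    package azkaban.db;

    import java.sql.SQLException;
    import org.apache.commons.dbutils.QueryRunner;
    import org.apache.commons.dbutils.ResultSetHandler;

    // Illustrative only: re-issues a read-only query up to MAX_DB_RETRY_COUNT times and
    // rethrows the last SQLException if every attempt fails.
    class RetryingQueryHelper {

      private final QueryRunner queryRunner;

      RetryingQueryHelper(final QueryRunner queryRunner) {
        this.queryRunner = queryRunner;
      }

      <T> T queryWithRetry(final String sql, final ResultSetHandler<T> handler,
          final Object... params) throws SQLException {
        SQLException lastFailure = null;
        for (int attempt = 0; attempt < AzDBUtil.MAX_DB_RETRY_COUNT; attempt++) {
          try {
            return this.queryRunner.query(sql, handler, params);
          } catch (final SQLException ex) {
            lastFailure = ex;
          }
        }
        throw lastFailure;
      }
    }
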
diff --git a/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperator.java b/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperator.java
index 23f10b2..42cd42f 100644
--- a/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperator.java
+++ b/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperator.java
@@ -22,13 +22,14 @@ import org.apache.commons.dbutils.ResultSetHandler;
 
 
 /**
- * This interface is designed as an supplement of {@link DatabaseOperator}, which do commit at the end of every query. Given
- * this interface, users/callers (implementation code) should decide where to {@link Connection#commit()}
- * based on their requirements.
+ * This interface is designed as a supplement to {@link DatabaseOperator}, which commits at the
+ * end of every query. Given this interface, users/callers (implementation code) should decide where
+ * to {@link Connection#commit()} based on their requirements.
  *
- * The diff between DatabaseTransOperator and DatabaseOperator:
- * * Auto commit and Auto close connection are enforced in DatabaseOperator, but not enabled in DatabaseTransOperator.
- * * We usually group a couple of sql operations which need the same connection into DatabaseTransOperator.
+ * The diff between DatabaseTransOperator and DatabaseOperator:
+ * * Auto commit and auto close connection are enforced in DatabaseOperator, but not enabled in
+ * DatabaseTransOperator.
+ * * We usually group a couple of sql operations which need the same connection into DatabaseTransOperator.
  *
  * @see org.apache.commons.dbutils.QueryRunner
  */
@@ -39,7 +40,6 @@ public interface DatabaseTransOperator {
    * Note that last insert and this operation should use the same connection.
    *
    * @return the last inserted id in mysql per connection.
-   * @throws SQLException
    */
   long getLastInsertId() throws SQLException;
 
@@ -52,7 +52,8 @@ public interface DatabaseTransOperator {
    * @return
    * @throws SQLException
    */
-  <T> T query(String querySql, ResultSetHandler<T> resultHandler, Object... params) throws SQLException;
+  <T> T query(String querySql, ResultSetHandler<T> resultHandler, Object... params)
+      throws SQLException;
 
   /**
    *
@@ -64,7 +65,6 @@ public interface DatabaseTransOperator {
   int update(String updateClause, Object... params) throws SQLException;
 
   /**
-   *
    * @return the JDBC connection associated with this operator.
    */
   Connection getConnection();
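
Since DatabaseTransOperator exists so that several statements can share one connection, a short hypothetical example (made-up table, not from this commit) shows the intended composition: LAST_INSERT_ID() is only meaningful on the connection that ran the insert, so both calls go into a single transaction.

    package azkaban.db;

    import java.sql.SQLException;

    // Hypothetical usage: the insert and the last-insert-id lookup share one connection by
    // running inside the same SQLTransaction handed to DatabaseOperator#transaction.
    public class InsertAndFetchIdExample {

      public long insertFlow(final DatabaseOperator dbOperator, final String flowName)
          throws SQLException {
        final SQLTransaction<Long> insertAndGetId = transOperator -> {
          transOperator.update("INSERT INTO flows (name) VALUES (?)", flowName);
          // MySQL-specific: reads LAST_INSERT_ID() on the same connection as the insert.
          return transOperator.getLastInsertId();
        };
        // transaction() commits on success and rethrows the SQLException on failure.
        return dbOperator.transaction(insertAndGetId);
      }
    }
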
diff --git a/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperatorImpl.java b/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperatorImpl.java
index 40a2ee2..78c9de3 100644
--- a/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperatorImpl.java
+++ b/azkaban-db/src/main/java/azkaban/db/DatabaseTransOperatorImpl.java
@@ -32,9 +32,9 @@ class DatabaseTransOperatorImpl implements DatabaseTransOperator {
   private final Connection conn;
   private final QueryRunner queryRunner;
 
-  public DatabaseTransOperatorImpl(QueryRunner queryRunner, Connection conn) {
+  public DatabaseTransOperatorImpl(final QueryRunner queryRunner, final Connection conn) {
     this.conn = conn;
-    this.queryRunner= queryRunner;
+    this.queryRunner = queryRunner;
   }
 
   /**
@@ -44,16 +44,18 @@ class DatabaseTransOperatorImpl implements DatabaseTransOperator {
    *
    * This value cannot be affected by other callers, even if they generate
    * AUTO_INCREMENT values of their own.
-   * @return last insertion ID
    *
+   * @return last insertion ID
    */
   @Override
   public long getLastInsertId() throws SQLException {
     // A default connection: autocommit = true.
     long num = -1;
     try {
-      num = ((Number) queryRunner.query(conn,"SELECT LAST_INSERT_ID();", new ScalarHandler<>(1))).longValue();
-    } catch (SQLException ex) {
+      num = ((Number) this.queryRunner
+          .query(this.conn, "SELECT LAST_INSERT_ID();", new ScalarHandler<>(1)))
+          .longValue();
+    } catch (final SQLException ex) {
       logger.error("can not get last insertion ID");
       throw ex;
     }
@@ -62,10 +64,12 @@ class DatabaseTransOperatorImpl implements DatabaseTransOperator {
 
 
   @Override
-  public <T> T query(String querySql, ResultSetHandler<T> resultHandler, Object... params) throws SQLException {
-    try{
-      return queryRunner.query(conn, querySql, resultHandler, params);
-    } catch (SQLException ex){
+  public <T> T query(final String querySql, final ResultSetHandler<T> resultHandler,
+      final Object... params)
+      throws SQLException {
+    try {
+      return this.queryRunner.query(this.conn, querySql, resultHandler, params);
+    } catch (final SQLException ex) {
       //RETRY Logic should be implemented here if needed.
       throw ex;
     } finally {
@@ -74,10 +78,10 @@ class DatabaseTransOperatorImpl implements DatabaseTransOperator {
   }
 
   @Override
-  public int update(String updateClause, Object... params) throws SQLException {
-    try{
-      return queryRunner.update(conn, updateClause, params);
-    } catch (SQLException ex){
+  public int update(final String updateClause, final Object... params) throws SQLException {
+    try {
+      return this.queryRunner.update(this.conn, updateClause, params);
+    } catch (final SQLException ex) {
       //RETRY Logic should be implemented here if needed.
       throw ex;
     } finally {
@@ -87,6 +91,6 @@ class DatabaseTransOperatorImpl implements DatabaseTransOperator {
 
   @Override
   public Connection getConnection() {
-    return conn;
+    return this.conn;
   }
 }
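
The getLastInsertId() implementation above uses commons-dbutils' ScalarHandler to pull a single value out of the result set. A reduced sketch of that pattern (illustrative helper, not part of this commit):

    package azkaban.db;

    import java.sql.SQLException;
    import org.apache.commons.dbutils.QueryRunner;
    import org.apache.commons.dbutils.handlers.ScalarHandler;

    // ScalarHandler(1) returns the first column of the first row; the caller narrows the
    // result to the numeric type it expects, as getLastInsertId() does above.
    class ScalarQueryExample {

      static long countRows(final QueryRunner queryRunner, final String table) throws SQLException {
        // The table name is concatenated only for illustration; real code should restrict
        // it to a known, trusted set of names.
        final Number count = (Number) queryRunner
            .query("SELECT COUNT(*) FROM " + table, new ScalarHandler<>(1));
        return count.longValue();
      }
    }
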
diff --git a/azkaban-db/src/main/java/azkaban/db/H2FileDataSource.java b/azkaban-db/src/main/java/azkaban/db/H2FileDataSource.java
index 1e671de..63a0ad4 100644
--- a/azkaban-db/src/main/java/azkaban/db/H2FileDataSource.java
+++ b/azkaban-db/src/main/java/azkaban/db/H2FileDataSource.java
@@ -20,9 +20,9 @@ import java.nio.file.Path;
 
 public class H2FileDataSource extends AzkabanDataSource {
 
-  public H2FileDataSource(Path filePath) {
+  public H2FileDataSource(final Path filePath) {
     super();
-    String url = "jdbc:h2:file:" + filePath;
+    final String url = "jdbc:h2:file:" + filePath;
     setDriverClassName("org.h2.Driver");
     setUrl(url);
   }
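
For orientation, the classes in this module can be wired together by hand roughly as follows. In production the wiring goes through Guice, and the database file path below is an arbitrary example, so treat this as a sketch rather than the project's bootstrap code:

    package azkaban.db;

    import java.nio.file.Path;
    import java.nio.file.Paths;
    import org.apache.commons.dbutils.QueryRunner;

    // Illustrative wiring: an H2 file-backed AzkabanDataSource feeds a QueryRunner, which in
    // turn backs a DatabaseOperatorImpl (its constructor requires a non-null data source).
    public class LocalDbBootstrap {

      public static DatabaseOperator createLocalOperator() {
        final Path dbFile = Paths.get("/tmp/azkaban-local-db");
        final AzkabanDataSource dataSource = new H2FileDataSource(dbFile);
        return new DatabaseOperatorImpl(new QueryRunner(dataSource));
      }
    }
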
diff --git a/azkaban-db/src/main/java/azkaban/db/MySQLDataSource.java b/azkaban-db/src/main/java/azkaban/db/MySQLDataSource.java
index 7a7a4dd..676f1e1 100644
--- a/azkaban-db/src/main/java/azkaban/db/MySQLDataSource.java
+++ b/azkaban-db/src/main/java/azkaban/db/MySQLDataSource.java
@@ -23,16 +23,16 @@ import org.apache.log4j.Logger;
 
 public class MySQLDataSource extends AzkabanDataSource {
 
-  private static Logger logger = Logger.getLogger(MySQLDataSource.class);
+  private static final Logger logger = Logger.getLogger(MySQLDataSource.class);
 
   private static volatile MySQLDataSource instance = null;
 
   // TODO kunkun-tang: have guice inject working here
-  private MySQLDataSource(String host, int port, String dbName,
-      String user, String password, int numConnections) {
+  private MySQLDataSource(final String host, final int port, final String dbName,
+      final String user, final String password, final int numConnections) {
     super();
 
-    String url = "jdbc:mysql://" + (host + ":" + port + "/" + dbName);
+    final String url = "jdbc:mysql://" + (host + ":" + port + "/" + dbName);
     addConnectionProperty("useUnicode", "yes");
     addConnectionProperty("characterEncoding", "UTF-8");
     setDriverClassName("com.mysql.jdbc.Driver");
@@ -47,8 +47,8 @@ public class MySQLDataSource extends AzkabanDataSource {
   /**
    * Get a singleton object for MySQL BasicDataSource
    */
-  public static MySQLDataSource getInstance(String host, int port, String dbName,
-      String user, String password, int numConnections) {
+  public static MySQLDataSource getInstance(final String host, final int port, final String dbName,
+      final String user, final String password, final int numConnections) {
     if (instance == null) {
       synchronized (MySQLDataSource.class) {
         if (instance == null) {
@@ -62,7 +62,6 @@ public class MySQLDataSource extends AzkabanDataSource {
 
   /**
    * This method overrides {@link BasicDataSource#getConnection()}, in order to have retry logics.
-   *
    */
   @Override
   public synchronized Connection getConnection() throws SQLException {
@@ -78,12 +77,15 @@ public class MySQLDataSource extends AzkabanDataSource {
         * Every attempt generates a thread-hanging time of about 75 seconds, which is hard-coded and cannot be changed.
          */
         connection = createDataSource().getConnection();
-        if(connection != null)
+        if (connection != null) {
           return connection;
-      } catch (SQLException ex) {
-        logger.error("Failed to find DB connection. waits 1 minutes and retry. No.Attempt = " + retryAttempt, ex);
+        }
+      } catch (final SQLException ex) {
+        logger.error(
+            "Failed to find DB connection. waits 1 minutes and retry. No.Attempt = " + retryAttempt,
+            ex);
       } finally {
-        retryAttempt ++;
+        retryAttempt++;
       }
     }
     return null;
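
getInstance() above is a standard double-checked-locking singleton: the volatile field makes the unsynchronized first check safe, and the lock is only taken while the instance is still null. Reduced to a generic form (names are illustrative, not Azkaban code):

    // Generic reduction of the idiom used by MySQLDataSource.getInstance().
    public class LazySingletonHolder {

      private static volatile LazySingletonHolder instance = null;

      private LazySingletonHolder() {
      }

      public static LazySingletonHolder getInstance() {
        if (instance == null) {                      // cheap, unsynchronized check
          synchronized (LazySingletonHolder.class) {
            if (instance == null) {                  // re-check while holding the lock
              instance = new LazySingletonHolder();
            }
          }
        }
        return instance;
      }
    }
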
diff --git a/azkaban-db/src/main/java/azkaban/db/SQLTransaction.java b/azkaban-db/src/main/java/azkaban/db/SQLTransaction.java
index 40860a6..7db1117 100644
--- a/azkaban-db/src/main/java/azkaban/db/SQLTransaction.java
+++ b/azkaban-db/src/main/java/azkaban/db/SQLTransaction.java
@@ -20,13 +20,14 @@ import java.sql.SQLException;
 
 
 /**
- * This interface defines how a sequence of sql statements are organized and packed together. All transaction
- * implementations must follow this interface, and will be called in
- * {@link DatabaseOperatorImpl#transaction(SQLTransaction)}
+ * This interface defines how a sequence of sql statements are organized and packed together. All
+ * transaction implementations must follow this interface, and will be called in {@link
+ * DatabaseOperatorImpl#transaction(SQLTransaction)}
  *
  * @param <T> The transaction return type
  */
 @FunctionalInterface
 public interface SQLTransaction<T> {
+
   public T execute(DatabaseTransOperator transOperator) throws SQLException;
 }
diff --git a/azkaban-db/src/test/java/azkaban/db/AzDBTestUtility.java b/azkaban-db/src/test/java/azkaban/db/AzDBTestUtility.java
index 3fea2b1..4316a6f 100644
--- a/azkaban-db/src/test/java/azkaban/db/AzDBTestUtility.java
+++ b/azkaban-db/src/test/java/azkaban/db/AzDBTestUtility.java
@@ -22,7 +22,7 @@ class AzDBTestUtility {
 
     EmbeddedH2BasicDataSource() {
       super();
-      String url = "jdbc:h2:mem:test";
+      final String url = "jdbc:h2:mem:test";
       setDriverClassName("org.h2.Driver");
       setUrl(url);
     }
diff --git a/azkaban-db/src/test/java/azkaban/db/DatabaseOperatorImplTest.java b/azkaban-db/src/test/java/azkaban/db/DatabaseOperatorImplTest.java
index 82be854..b278cfc 100644
--- a/azkaban-db/src/test/java/azkaban/db/DatabaseOperatorImplTest.java
+++ b/azkaban-db/src/test/java/azkaban/db/DatabaseOperatorImplTest.java
@@ -16,6 +16,8 @@
  */
 package azkaban.db;
 
+import static org.mockito.Mockito.*;
+
 import java.sql.Connection;
 import java.sql.SQLException;
 import java.util.ArrayList;
@@ -23,110 +25,107 @@ import java.util.List;
 import javax.sql.DataSource;
 import org.apache.commons.dbutils.QueryRunner;
 import org.apache.commons.dbutils.ResultSetHandler;
-import org.apache.commons.dbutils.handlers.ScalarHandler;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
-
 
 public class DatabaseOperatorImplTest {
 
-  private AzkabanDataSource datasource = new AzDBTestUtility.EmbeddedH2BasicDataSource();
-
-  private DatabaseOperator dbOperator;
-  private QueryRunner queryRunner;
-  private Connection conn;
-
-  private ResultSetHandler<Integer> handler = rs -> {
+  private static final List<Integer> list = new ArrayList<>();
+  private static final int index_2 = 15;
+  private static int index_1 = 3;
+  private final AzkabanDataSource datasource = new AzDBTestUtility.EmbeddedH2BasicDataSource();
+  private final ResultSetHandler<Integer> handler = rs -> {
     if (!rs.next()) {
       return 0;
     }
     return rs.getInt(1);
   };
-
-  private static final List<Integer> list = new ArrayList<>();
-
-  private static int index_1 = 3;
-  private static int index_2 = 15;
+  private DatabaseOperator dbOperator;
+  private QueryRunner queryRunner;
+  private Connection conn;
 
   @Before
   public void setUp() throws Exception {
-    queryRunner = mock(QueryRunner.class);
+    this.queryRunner = mock(QueryRunner.class);
 
-    conn = datasource.getConnection();
-    DataSource mockDataSource = mock(datasource.getClass());
+    this.conn = this.datasource.getConnection();
+    final DataSource mockDataSource = mock(this.datasource.getClass());
 
-    when(queryRunner.getDataSource()).thenReturn(mockDataSource);
-    when(mockDataSource.getConnection()).thenReturn(conn);
+    when(this.queryRunner.getDataSource()).thenReturn(mockDataSource);
+    when(mockDataSource.getConnection()).thenReturn(this.conn);
 
-    this.dbOperator = new DatabaseOperatorImpl(queryRunner);
+    this.dbOperator = new DatabaseOperatorImpl(this.queryRunner);
 
     list.add(index_1);
     list.add(index_2);
 
     // valid query returns correct value
-    when(queryRunner.query("select * from blah where ? = ?", handler, "id", 2)).thenReturn(index_2);
+    when(this.queryRunner.query("select * from blah where ? = ?", this.handler, "id", 2))
+        .thenReturn(index_2);
 
     // If select an non-existing entry, handler returns 0.
-    when(queryRunner.query("select * from blah where ? = ?", handler, "id", 3)).thenReturn(0);
+    when(this.queryRunner.query("select * from blah where ? = ?", this.handler, "id", 3))
+        .thenReturn(0);
 
     //If typos, throw Exceptions.
-    doThrow(SQLException.class).when(queryRunner).query("sele * from blah where ? = ?", handler, "id", 2);
+    doThrow(SQLException.class).when(this.queryRunner)
+        .query("sele * from blah where ? = ?", this.handler, "id", 2);
 
     doAnswer(invocation -> {
       index_1 = 26;
       return 1;
-    }).when(queryRunner).update("update blah set ? = ?", "1", 26);
+    }).when(this.queryRunner).update("update blah set ? = ?", "1", 26);
   }
 
   @Test
   public void testValidQuery() throws Exception {
-    int res = dbOperator.query("select * from blah where ? = ?", handler, "id", 2);
+    final int res = this.dbOperator.query("select * from blah where ? = ?", this.handler, "id", 2);
     Assert.assertEquals(15, res);
-    verify(queryRunner).query("select * from blah where ? = ?", handler, "id", 2);
+    verify(this.queryRunner).query("select * from blah where ? = ?", this.handler, "id", 2);
   }
 
   @Test
   public void testInvalidQuery() throws Exception {
-    int res = dbOperator.query("select * from blah where ? = ?", handler, "id", 3);
+    final int res = this.dbOperator.query("select * from blah where ? = ?", this.handler, "id", 3);
     Assert.assertEquals(0, res);
   }
 
   @Test(expected = SQLException.class)
   public void testTypoSqlStatement() throws Exception {
     System.out.println("testTypoSqlStatement");
-    dbOperator.query("sele * from blah where ? = ?", handler, "id", 2);
+    this.dbOperator.query("sele * from blah where ? = ?", this.handler, "id", 2);
   }
 
   @Test
   public void testTransaction() throws Exception {
-    when(queryRunner.update(conn, "update blah set ? = ?", "1", 26)).thenReturn(1);
-    when(queryRunner.query(conn, "select * from blah where ? = ?", handler, "id", 1)).thenReturn(26);
+    when(this.queryRunner.update(this.conn, "update blah set ? = ?", "1", 26)).thenReturn(1);
+    when(this.queryRunner.query(this.conn, "select * from blah where ? = ?", this.handler, "id", 1))
+        .thenReturn(26);
 
-    SQLTransaction<Integer> transaction = transOperator -> {
+    final SQLTransaction<Integer> transaction = transOperator -> {
       transOperator.update("update blah set ? = ?", "1", 26);
-      return transOperator.query("select * from blah where ? = ?", handler, "id", 1);
+      return transOperator.query("select * from blah where ? = ?", this.handler, "id", 1);
     };
 
-    int res = dbOperator.transaction(transaction);
+    final int res = this.dbOperator.transaction(transaction);
     Assert.assertEquals(26, res);
   }
 
   @Test
   public void testValidUpdate() throws Exception {
-    int res = dbOperator.update("update blah set ? = ?", "1", 26);
+    final int res = this.dbOperator.update("update blah set ? = ?", "1", 26);
 
     // 1 row is affected
     Assert.assertEquals(1, res);
     Assert.assertEquals(26, index_1);
-    verify(queryRunner).update("update blah set ? = ?", "1", 26);
+    verify(this.queryRunner).update("update blah set ? = ?", "1", 26);
   }
 
   @Test
   public void testInvalidUpdate() throws Exception {
-    int res = dbOperator.update("update blah set ? = ?", "3", 26);
+    final int res = this.dbOperator.update("update blah set ? = ?", "3", 26);
 
     // 0 row is affected
     Assert.assertEquals(0, res);
diff --git a/azkaban-db/src/test/java/azkaban/db/DatabaseTransOperatorImplTest.java b/azkaban-db/src/test/java/azkaban/db/DatabaseTransOperatorImplTest.java
index 86f11f6..660ba95 100644
--- a/azkaban-db/src/test/java/azkaban/db/DatabaseTransOperatorImplTest.java
+++ b/azkaban-db/src/test/java/azkaban/db/DatabaseTransOperatorImplTest.java
@@ -26,15 +26,18 @@ public class DatabaseTransOperatorImplTest {
 
   @Before
   public void setUp() throws Exception {
-    AzkabanDataSource datasource = new AzDBTestUtility.EmbeddedH2BasicDataSource();
-    DatabaseTransOperator operator = new DatabaseTransOperatorImpl(new QueryRunner(), datasource.getConnection());
+    final AzkabanDataSource datasource = new AzDBTestUtility.EmbeddedH2BasicDataSource();
+    final DatabaseTransOperator operator = new DatabaseTransOperatorImpl(new QueryRunner(),
+        datasource.getConnection());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testQuery() throws Exception {
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testUpdate() throws Exception {
   }
 }
diff --git a/azkaban-db/src/test/resources/log4j.properties b/azkaban-db/src/test/resources/log4j.properties
index 182c9a1..eaf3adb 100644
--- a/azkaban-db/src/test/resources/log4j.properties
+++ b/azkaban-db/src/test/resources/log4j.properties
@@ -1,7 +1,5 @@
 log4j.rootLogger=INFO, Console
-
 log4j.appender.Console=org.apache.log4j.ConsoleAppender
 log4j.appender.Console.layout=org.apache.log4j.PatternLayout
 log4j.appender.Console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] %m%n
-
 log4j.category.velocity=INFO
\ No newline at end of file
diff --git a/azkaban-exec-server/build.gradle b/azkaban-exec-server/build.gradle
index d4f3322..fd9acb8 100644
--- a/azkaban-exec-server/build.gradle
+++ b/azkaban-exec-server/build.gradle
@@ -1,38 +1,38 @@
 apply plugin: 'distribution'
 
 dependencies {
-  compile(project(':azkaban-common'))
-  compile('org.apache.kafka:kafka-log4j-appender:0.10.0.0')
-  compile('com.googlecode.json-simple:json-simple:1.1.1') {
-    exclude group: 'junit', module: 'junit'
-  }
+    compile(project(':azkaban-common'))
+    compile('org.apache.kafka:kafka-log4j-appender:0.10.0.0')
+    compile('com.googlecode.json-simple:json-simple:1.1.1') {
+        exclude group: 'junit', module: 'junit'
+    }
 
-  runtime(project(':azkaban-hadoop-security-plugin'))
-  runtime('org.slf4j:slf4j-log4j12:1.7.18')
+    runtime(project(':azkaban-hadoop-security-plugin'))
+    runtime('org.slf4j:slf4j-log4j12:1.7.18')
 
-  testCompile(project(path: ':azkaban-common', configuration: 'testCompile'))
-  testCompile(project(':azkaban-common').sourceSets.test.output)
+    testCompile(project(path: ':azkaban-common', configuration: 'testCompile'))
+    testCompile(project(':azkaban-common').sourceSets.test.output)
 
-  testRuntime "com.h2database:h2:1.4.193"
+    testRuntime "com.h2database:h2:1.4.193"
 }
 
 distributions {
-  main {
-    contents {
-      from('src/main/bash') {
-        into 'bin'
-        fileMode = 0755
-      }
-      from('../azkaban-common/src/main/bash') {
-        into 'bin'
-        fileMode = 0755
-      }
-      from(configurations.runtime) {
-        into 'lib'
-      }
-      from(jar) {
-        into 'lib'
-      }
+    main {
+        contents {
+            from('src/main/bash') {
+                into 'bin'
+                fileMode = 0755
+            }
+            from('../azkaban-common/src/main/bash') {
+                into 'bin'
+                fileMode = 0755
+            }
+            from(configurations.runtime) {
+                into 'lib'
+            }
+            from(jar) {
+                into 'lib'
+            }
+        }
     }
-  }
 }
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/action/KillExecutionAction.java b/azkaban-exec-server/src/main/java/azkaban/execapp/action/KillExecutionAction.java
index 0d0f4e6..232afb3 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/action/KillExecutionAction.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/action/KillExecutionAction.java
@@ -16,14 +16,12 @@
 
 package azkaban.execapp.action;
 
-import java.util.HashMap;
-import java.util.Map;
-
 import azkaban.Constants;
 import azkaban.ServiceProvider;
 import azkaban.execapp.FlowRunnerManager;
 import azkaban.trigger.TriggerAction;
-
+import java.util.HashMap;
+import java.util.Map;
 import org.apache.log4j.Logger;
 
 
@@ -34,69 +32,68 @@ public class KillExecutionAction implements TriggerAction {
   private static final Logger logger = Logger
       .getLogger(KillExecutionAction.class);
 
-  private String actionId;
-  private int execId;
+  private final String actionId;
+  private final int execId;
 
-  public KillExecutionAction(String actionId, int execId) {
+  public KillExecutionAction(final String actionId, final int execId) {
     this.execId = execId;
     this.actionId = actionId;
   }
 
-  @Override
-  public String getId() {
-    return actionId;
-  }
-
-  @Override
-  public String getType() {
-    return type;
-  }
-
-  @SuppressWarnings("unchecked")
-  public static KillExecutionAction createFromJson(Object obj) {
+  public static KillExecutionAction createFromJson(final Object obj) {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
-  public static KillExecutionAction createFromJson(HashMap<String, Object> obj) {
-    Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
-    String objType = (String) jsonObj.get("type");
+  public static KillExecutionAction createFromJson(final HashMap<String, Object> obj) {
+    final Map<String, Object> jsonObj = (HashMap<String, Object>) obj;
+    final String objType = (String) jsonObj.get("type");
     if (!objType.equals(type)) {
       throw new RuntimeException("Cannot create action of " + type + " from "
           + objType);
     }
-    String actionId = (String) jsonObj.get("actionId");
-    int execId = Integer.valueOf((String) jsonObj.get("execId"));
+    final String actionId = (String) jsonObj.get("actionId");
+    final int execId = Integer.valueOf((String) jsonObj.get("execId"));
     return new KillExecutionAction(actionId, execId);
   }
 
-  @SuppressWarnings("unchecked")
   @Override
-  public KillExecutionAction fromJson(Object obj) throws Exception {
+  public String getId() {
+    return this.actionId;
+  }
+
+  @Override
+  public String getType() {
+    return type;
+  }
+
+  @Override
+  public KillExecutionAction fromJson(final Object obj) throws Exception {
     return createFromJson((HashMap<String, Object>) obj);
   }
 
   @Override
   public Object toJson() {
-    Map<String, Object> jsonObj = new HashMap<String, Object>();
-    jsonObj.put("actionId", actionId);
+    final Map<String, Object> jsonObj = new HashMap<>();
+    jsonObj.put("actionId", this.actionId);
     jsonObj.put("type", type);
-    jsonObj.put("execId", String.valueOf(execId));
+    jsonObj.put("execId", String.valueOf(this.execId));
     return jsonObj;
   }
 
   @Override
   public void doAction() throws Exception {
-    logger.info("ready to kill execution " + execId);
-    ServiceProvider.SERVICE_PROVIDER.getInstance(FlowRunnerManager.class).cancelFlow(execId, Constants.AZKABAN_SLA_CHECKER_USERNAME);
+    logger.info("ready to kill execution " + this.execId);
+    ServiceProvider.SERVICE_PROVIDER.getInstance(FlowRunnerManager.class)
+        .cancelFlow(this.execId, Constants.AZKABAN_SLA_CHECKER_USERNAME);
   }
 
   @Override
-  public void setContext(Map<String, Object> context) {
+  public void setContext(final Map<String, Object> context) {
   }
 
   @Override
   public String getDescription() {
-    return type + " for " + execId;
+    return type + " for " + this.execId;
   }
 
 }
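
The reordered methods above form a small JSON contract: toJson() flattens the action into a Map of strings, and createFromJson() rebuilds it, rejecting maps whose "type" field does not match. A rough round-trip sketch (arbitrary action id and execution id, not part of this commit):

    package azkaban.execapp.action;

    import java.util.HashMap;
    import java.util.Map;

    // Illustrative round trip through the JSON representation of KillExecutionAction.
    public class KillExecutionActionJsonDemo {

      public static void main(final String[] args) {
        final KillExecutionAction original = new KillExecutionAction("sla-action-1", 12345);

        // toJson() returns a Map with "actionId", "type" and "execId" stored as strings.
        final Map<String, Object> json = (Map<String, Object>) original.toJson();

        // createFromJson() parses execId back from its string form and checks the type field.
        final HashMap<String, Object> copy = new HashMap<>(json);
        final KillExecutionAction restored = KillExecutionAction.createFromJson(copy);

        System.out.println(restored.getDescription());
      }
    }
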
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecServerModule.java b/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecServerModule.java
index 3654919..56e0b35 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecServerModule.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecServerModule.java
@@ -24,11 +24,12 @@ import com.google.inject.Scopes;
 
 
 /**
- * This Guice module is currently a one place container for all bindings in the current module. This is intended to
- * help during the migration process to Guice. Once this class starts growing we can move towards more modular
- * structuring of Guice components.
+ * This Guice module is currently a one place container for all bindings in the current module. This
+ * is intended to help during the migration process to Guice. Once this class starts growing we can
+ * move towards more modular structuring of Guice components.
  */
 public class AzkabanExecServerModule extends AbstractModule {
+
   @Override
   protected void configure() {
     bind(ExecutorLoader.class).to(JdbcExecutorLoader.class).in(Scopes.SINGLETON);
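
The module above binds ExecutorLoader to JdbcExecutorLoader in singleton scope and is installed next to AzkabanCommonModule when the injector is created. A self-contained sketch of that binding style, with made-up types purely for illustration:

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Scopes;

    // Stand-alone Guice sketch: an interface is bound to its implementation in singleton
    // scope, and callers ask the Injector for the interface type.
    public class GuiceBindingSketch {

      interface Greeter {
        void greet(String name);
      }

      static class ConsoleGreeter implements Greeter {
        @Override
        public void greet(final String name) {
          System.out.println("Hello, " + name);
        }
      }

      static class GreeterModule extends AbstractModule {
        @Override
        protected void configure() {
          // Mirrors bind(ExecutorLoader.class).to(JdbcExecutorLoader.class).in(Scopes.SINGLETON)
          bind(Greeter.class).to(ConsoleGreeter.class).in(Scopes.SINGLETON);
        }
      }

      public static void main(final String[] args) {
        final Injector injector = Guice.createInjector(new GreeterModule());
        injector.getInstance(Greeter.class).greet("Azkaban");
      }
    }
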
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecutorServer.java b/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
index 2ce08b5..191b810 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/AzkabanExecutorServer.java
@@ -16,6 +16,11 @@
 
 package azkaban.execapp;
 
+import static azkaban.Constants.AZKABAN_EXECUTOR_PORT_FILENAME;
+import static azkaban.ServiceProvider.SERVICE_PROVIDER;
+import static com.google.common.base.Preconditions.checkState;
+import static java.util.Objects.requireNonNull;
+
 import azkaban.AzkabanCommonModule;
 import azkaban.Constants;
 import azkaban.execapp.event.JobCallbackManager;
@@ -67,20 +72,14 @@ import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.ServletHolder;
 import org.mortbay.thread.QueuedThreadPool;
 
-import static azkaban.Constants.*;
-import static azkaban.ServiceProvider.*;
-import static com.google.common.base.Preconditions.*;
-import static java.util.Objects.*;
-
 public class AzkabanExecutorServer {
-  private static final String CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY = "jmx.attribute.processor.class";
-  private static final Logger logger = Logger.getLogger(AzkabanExecutorServer.class);
-  private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024;
 
   public static final String JOBTYPE_PLUGIN_DIR = "azkaban.jobtype.plugin.dir";
   public static final String METRIC_INTERVAL = "executor.metric.milisecinterval.";
   public static final int DEFAULT_HEADER_BUFFER_SIZE = 4096;
-
+  private static final String CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY = "jmx.attribute.processor.class";
+  private static final Logger logger = Logger.getLogger(AzkabanExecutorServer.class);
+  private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024;
   private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
   private static final int DEFAULT_THREAD_NUMBER = 50;
 
@@ -91,18 +90,18 @@ public class AzkabanExecutorServer {
   private final Props props;
   private final Server server;
 
-  private final ArrayList<ObjectName> registeredMBeans = new ArrayList<ObjectName>();
+  private final ArrayList<ObjectName> registeredMBeans = new ArrayList<>();
   private MBeanServer mbeanServer;
 
   @Inject
-  public AzkabanExecutorServer(Props props,
-      ExecutorLoader executionLoader,
-      FlowRunnerManager runnerManager) throws Exception {
+  public AzkabanExecutorServer(final Props props,
+      final ExecutorLoader executionLoader,
+      final FlowRunnerManager runnerManager) throws Exception {
     this.props = props;
     this.executionLoader = executionLoader;
     this.runnerManager = runnerManager;
 
-    server = createJettyServer(props);
+    this.server = createJettyServer(props);
 
     JmxJobMBeanManager.getInstance().initialize(props);
 
@@ -115,8 +114,8 @@ public class AzkabanExecutorServer {
     loadCustomJMXAttributeProcessor(props);
 
     try {
-      server.start();
-    } catch (Exception e) {
+      this.server.start();
+    } catch (final Exception e) {
       logger.error(e);
       Utils.croak(e.getMessage(), 1);
     }
@@ -131,22 +130,122 @@ public class AzkabanExecutorServer {
     }
   }
 
-  private Server createJettyServer(Props props) {
-    int maxThreads = props.getInt("executor.maxThreads", DEFAULT_THREAD_NUMBER);
+  /**
+   * Returns the currently executing executor server, if one exists.
+   */
+  public static AzkabanExecutorServer getApp() {
+    return app;
+  }
+
+  /**
+   * Azkaban using Jetty
+   */
+  public static void main(final String[] args) throws Exception {
+    // Redirect all std out and err messages into log4j
+    StdOutErrRedirect.redirectOutAndErrToLog();
+
+    logger.info("Starting Jetty Azkaban Executor...");
+    final Props props = AzkabanServer.loadProps(args);
+
+    if (props == null) {
+      logger.error("Azkaban Properties not loaded.");
+      logger.error("Exiting Azkaban Executor Server...");
+      return;
+    }
+
+    /* Initialize Guice Injector */
+    final Injector injector = Guice
+        .createInjector(new AzkabanCommonModule(props), new AzkabanExecServerModule());
+    SERVICE_PROVIDER.setInjector(injector);
+
+    launch(injector.getInstance(AzkabanExecutorServer.class));
+  }
+
+  public static void launch(final AzkabanExecutorServer azkabanExecutorServer) throws Exception {
+    setupTimeZone(azkabanExecutorServer.getAzkabanProps());
+    app = azkabanExecutorServer;
+
+    Runtime.getRuntime().addShutdownHook(new Thread() {
+
+      @Override
+      public void run() {
+        try {
+          logTopMemoryConsumers();
+        } catch (final Exception e) {
+          logger.info(("Exception when logging top memory consumers"), e);
+        }
+
+        final String host = app.getHost();
+        final int port = app.getPort();
+        try {
+          logger.info(String
+              .format("Removing executor(host: %s, port: %s) entry from database...", host, port));
+          app.getExecutorLoader().removeExecutor(host, port);
+        } catch (final ExecutorManagerException ex) {
+          logger.error(
+              String.format("Exception when removing executor(host: %s, port: %s)", host, port),
+              ex);
+        }
+
+        logger.warn("Shutting down executor...");
+        try {
+          app.shutdownNow();
+        } catch (final Exception e) {
+          logger.error("Error while shutting down http server.", e);
+        }
+      }
+
+      public void logTopMemoryConsumers() throws Exception, IOException {
+        if (new File("/bin/bash").exists() && new File("/bin/ps").exists()
+            && new File("/usr/bin/head").exists()) {
+          logger.info("logging top memeory consumer");
+
+          final java.lang.ProcessBuilder processBuilder =
+              new java.lang.ProcessBuilder("/bin/bash", "-c",
+                  "/bin/ps aux --sort -rss | /usr/bin/head");
+          final Process p = processBuilder.start();
+          p.waitFor();
+
+          final InputStream is = p.getInputStream();
+          final java.io.BufferedReader reader =
+              new java.io.BufferedReader(new InputStreamReader(is));
+          String line = null;
+          while ((line = reader.readLine()) != null) {
+            logger.info(line);
+          }
+          is.close();
+        }
+      }
+    });
+  }
+
+  private static void setupTimeZone(final Props azkabanSettings) {
+    if (azkabanSettings.containsKey(DEFAULT_TIMEZONE_ID)) {
+      final String timezone = azkabanSettings.getString(DEFAULT_TIMEZONE_ID);
+      System.setProperty("user.timezone", timezone);
+      TimeZone.setDefault(TimeZone.getTimeZone(timezone));
+      DateTimeZone.setDefault(DateTimeZone.forID(timezone));
+
+      logger.info("Setting timezone to " + timezone);
+    }
+  }
+
+  private Server createJettyServer(final Props props) {
+    final int maxThreads = props.getInt("executor.maxThreads", DEFAULT_THREAD_NUMBER);
 
     /*
      * Default to a port number 0 (zero)
      * The Jetty server automatically finds an unused port when the port number is set to zero
      * TODO: This is using a highly outdated version of jetty [year 2010]. needs to be updated.
      */
-    Server server = new Server(props.getInt("executor.port", 0));
-    QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
+    final Server server = new Server(props.getInt("executor.port", 0));
+    final QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
     server.setThreadPool(httpThreadPool);
 
-    boolean isStatsOn = props.getBoolean("executor.connector.stats", true);
+    final boolean isStatsOn = props.getBoolean("executor.connector.stats", true);
     logger.info("Setting up connector with stats on: " + isStatsOn);
 
-    for (Connector connector : server.getConnectors()) {
+    for (final Connector connector : server.getConnectors()) {
       connector.setStatsOn(isStatsOn);
       logger.info(String.format(
           "Jetty connector name: %s, default header buffer size: %d",
@@ -158,7 +257,7 @@ public class AzkabanExecutorServer {
           connector.getName(), connector.getHeaderBufferSize()));
     }
 
-    Context root = new Context(server, "/", Context.SESSIONS);
+    final Context root = new Context(server, "/", Context.SESSIONS);
     root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
 
     root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor");
@@ -174,7 +273,7 @@ public class AzkabanExecutorServer {
     ExecMetrics.INSTANCE.addFlowRunnerManagerMetrics(getFlowRunnerManager());
 
     logger.info("starting reporting Executor Metrics");
-    MetricsManager.INSTANCE.startReporting("AZ-EXEC", props);
+    MetricsManager.INSTANCE.startReporting("AZ-EXEC", this.props);
   }
 
   private void insertExecutorEntryIntoDB() {
@@ -182,12 +281,12 @@ public class AzkabanExecutorServer {
       final String host = requireNonNull(getHost());
       final int port = getPort();
       checkState(port != -1);
-      final Executor executor = executionLoader.fetchExecutor(host, port);
+      final Executor executor = this.executionLoader.fetchExecutor(host, port);
       if (executor == null) {
-        executionLoader.addExecutor(host, port);
+        this.executionLoader.addExecutor(host, port);
       }
       // If executor already exists, ignore it
-    } catch (ExecutorManagerException e) {
+    } catch (final ExecutorManagerException e) {
       logger.error("Error inserting executor entry into DB", e);
       Throwables.propagate(e);
     }
@@ -195,17 +294,18 @@ public class AzkabanExecutorServer {
 
   private void dumpPortToFile() {
     // By default this should write to the working directory
-    try (BufferedWriter writer = new BufferedWriter(new FileWriter(AZKABAN_EXECUTOR_PORT_FILENAME))) {
+    try (BufferedWriter writer = new BufferedWriter(
+        new FileWriter(AZKABAN_EXECUTOR_PORT_FILENAME))) {
       writer.write(String.valueOf(getPort()));
       writer.write("\n");
-    } catch (IOException e) {
+    } catch (final IOException e) {
       logger.error(e);
       Throwables.propagate(e);
     }
   }
 
-  private void configureJobCallback(Props props) {
-    boolean jobCallbackEnabled =
+  private void configureJobCallback(final Props props) {
+    final boolean jobCallbackEnabled =
         props.getBoolean("azkaban.executor.jobcallback.enabled", true);
 
     logger.info("Job callback enabled? " + jobCallbackEnabled);
@@ -217,46 +317,44 @@ public class AzkabanExecutorServer {
 
   /**
    * Configure Metric Reporting as per azkaban.properties settings
-   *
-   * @throws MetricException
    */
   private void configureMetricReports() throws MetricException {
-    Props props = getAzkabanProps();
+    final Props props = getAzkabanProps();
     if (props != null && props.getBoolean("executor.metric.reports", false)) {
       logger.info("Starting to configure Metric Reports");
-      MetricReportManager metricManager = MetricReportManager.getInstance();
-      IMetricEmitter metricEmitter = new InMemoryMetricEmitter(props);
+      final MetricReportManager metricManager = MetricReportManager.getInstance();
+      final IMetricEmitter metricEmitter = new InMemoryMetricEmitter(props);
       metricManager.addMetricEmitter(metricEmitter);
 
       logger.info("Adding number of failed flow metric");
       metricManager.addMetric(new NumFailedFlowMetric(metricManager, props
           .getInt(METRIC_INTERVAL
-              + NumFailedFlowMetric.NUM_FAILED_FLOW_METRIC_NAME,
+                  + NumFailedFlowMetric.NUM_FAILED_FLOW_METRIC_NAME,
               props.getInt(METRIC_INTERVAL + "default"))));
 
       logger.info("Adding number of failed jobs metric");
       metricManager.addMetric(new NumFailedJobMetric(metricManager, props
           .getInt(METRIC_INTERVAL
-              + NumFailedJobMetric.NUM_FAILED_JOB_METRIC_NAME,
+                  + NumFailedJobMetric.NUM_FAILED_JOB_METRIC_NAME,
               props.getInt(METRIC_INTERVAL + "default"))));
 
       logger.info("Adding number of running Jobs metric");
       metricManager.addMetric(new NumRunningJobMetric(metricManager, props
           .getInt(METRIC_INTERVAL
-              + NumRunningJobMetric.NUM_RUNNING_JOB_METRIC_NAME,
+                  + NumRunningJobMetric.NUM_RUNNING_JOB_METRIC_NAME,
               props.getInt(METRIC_INTERVAL + "default"))));
 
       logger.info("Adding number of running flows metric");
-      metricManager.addMetric(new NumRunningFlowMetric(runnerManager,
+      metricManager.addMetric(new NumRunningFlowMetric(this.runnerManager,
           metricManager, props.getInt(METRIC_INTERVAL
               + NumRunningFlowMetric.NUM_RUNNING_FLOW_METRIC_NAME,
-              props.getInt(METRIC_INTERVAL + "default"))));
+          props.getInt(METRIC_INTERVAL + "default"))));
 
       logger.info("Adding number of queued flows metric");
-      metricManager.addMetric(new NumQueuedFlowMetric(runnerManager,
+      metricManager.addMetric(new NumQueuedFlowMetric(this.runnerManager,
           metricManager, props.getInt(METRIC_INTERVAL
               + NumQueuedFlowMetric.NUM_QUEUED_FLOW_METRIC_NAME,
-              props.getInt(METRIC_INTERVAL + "default"))));
+          props.getInt(METRIC_INTERVAL + "default"))));
 
       logger.info("Completed configuring Metric Reports");
     }
@@ -272,21 +370,19 @@ public class AzkabanExecutorServer {
    *
    * Basically the custom class must have a constructor that takes an argument
    * with type Properties.
-   *
-   * @param props
    */
-  private void loadCustomJMXAttributeProcessor(Props props) {
-    String jmxAttributeEmitter =
+  private void loadCustomJMXAttributeProcessor(final Props props) {
+    final String jmxAttributeEmitter =
         props.get(CUSTOM_JMX_ATTRIBUTE_PROCESSOR_PROPERTY);
     if (jmxAttributeEmitter != null) {
       try {
         logger.info("jmxAttributeEmitter: " + jmxAttributeEmitter);
-        Constructor<Props>[] constructors =
+        final Constructor<Props>[] constructors =
             (Constructor<Props>[]) Class.forName(jmxAttributeEmitter)
                 .getConstructors();
 
         constructors[0].newInstance(props.toProperties());
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error("Encountered error while loading and instantiating "
             + jmxAttributeEmitter, e);
         throw new IllegalStateException(
@@ -300,133 +396,30 @@ public class AzkabanExecutorServer {
   }
 
   public ExecutorLoader getExecutorLoader() {
-    return executionLoader;
+    return this.executionLoader;
   }
 
   /**
    * Returns the global azkaban properties
-   *
-   * @return
    */
   public Props getAzkabanProps() {
-    return props;
-  }
-
-  /**
-   * Returns the currently executing executor server, if one exists.
-   *
-   * @return
-   */
-  public static AzkabanExecutorServer getApp() {
-    return app;
-  }
-
-  /**
-   * Azkaban using Jetty
-   *
-   * @param args
-   * @throws IOException
-   */
-  public static void main(String[] args) throws Exception {
-    // Redirect all std out and err messages into log4j
-    StdOutErrRedirect.redirectOutAndErrToLog();
-
-    logger.info("Starting Jetty Azkaban Executor...");
-    Props props = AzkabanServer.loadProps(args);
-
-    if (props == null) {
-      logger.error("Azkaban Properties not loaded.");
-      logger.error("Exiting Azkaban Executor Server...");
-      return;
-    }
-
-    /* Initialize Guice Injector */
-    final Injector injector = Guice.createInjector(new AzkabanCommonModule(props), new AzkabanExecServerModule());
-    SERVICE_PROVIDER.setInjector(injector);
-
-    launch(injector.getInstance(AzkabanExecutorServer.class));
-  }
-
-  public static void launch(AzkabanExecutorServer azkabanExecutorServer) throws Exception {
-    setupTimeZone(azkabanExecutorServer.getAzkabanProps());
-    app = azkabanExecutorServer;
-
-    Runtime.getRuntime().addShutdownHook(new Thread() {
-
-      @Override
-      public void run() {
-        try {
-          logTopMemoryConsumers();
-        } catch (Exception e) {
-          logger.info(("Exception when logging top memory consumers"), e);
-        }
-
-        String host = app.getHost();
-        int port = app.getPort();
-        try {
-          logger.info(String.format("Removing executor(host: %s, port: %s) entry from database...", host, port));
-          app.getExecutorLoader().removeExecutor(host, port);
-        } catch (ExecutorManagerException ex) {
-          logger.error(String.format("Exception when removing executor(host: %s, port: %s)", host, port), ex);
-        }
-
-        logger.warn("Shutting down executor...");
-        try {
-          app.shutdownNow();
-        } catch (Exception e) {
-          logger.error("Error while shutting down http server.", e);
-        }
-      }
-
-      public void logTopMemoryConsumers() throws Exception, IOException {
-        if (new File("/bin/bash").exists() && new File("/bin/ps").exists()
-            && new File("/usr/bin/head").exists()) {
-          logger.info("logging top memeory consumer");
-
-          java.lang.ProcessBuilder processBuilder =
-              new java.lang.ProcessBuilder("/bin/bash", "-c",
-                  "/bin/ps aux --sort -rss | /usr/bin/head");
-          Process p = processBuilder.start();
-          p.waitFor();
-
-          InputStream is = p.getInputStream();
-          java.io.BufferedReader reader =
-              new java.io.BufferedReader(new InputStreamReader(is));
-          String line = null;
-          while ((line = reader.readLine()) != null) {
-            logger.info(line);
-          }
-          is.close();
-        }
-      }
-    });
-  }
-
-  private static void setupTimeZone(Props azkabanSettings) {
-    if (azkabanSettings.containsKey(DEFAULT_TIMEZONE_ID)) {
-      String timezone = azkabanSettings.getString(DEFAULT_TIMEZONE_ID);
-      System.setProperty("user.timezone", timezone);
-      TimeZone.setDefault(TimeZone.getTimeZone(timezone));
-      DateTimeZone.setDefault(DateTimeZone.forID(timezone));
-
-      logger.info("Setting timezone to " + timezone);
-    }
+    return this.props;
   }
 
   public FlowRunnerManager getFlowRunnerManager() {
-    return runnerManager;
+    return this.runnerManager;
   }
 
   private void configureMBeanServer() {
     logger.info("Registering MBeans...");
-    mbeanServer = ManagementFactory.getPlatformMBeanServer();
+    this.mbeanServer = ManagementFactory.getPlatformMBeanServer();
 
-    registerMbean("executorJetty", new JmxJettyServer(server));
-    registerMbean("flowRunnerManager", new JmxFlowRunnerManager(runnerManager));
+    registerMbean("executorJetty", new JmxJettyServer(this.server));
+    registerMbean("flowRunnerManager", new JmxFlowRunnerManager(this.runnerManager));
     registerMbean("jobJMXMBean", JmxJobMBeanManager.getInstance());
 
     if (JobCallbackManager.isInitialized()) {
-      JobCallbackManager jobCallbackMgr = JobCallbackManager.getInstance();
+      final JobCallbackManager jobCallbackMgr = JobCallbackManager.getInstance();
       registerMbean("jobCallbackJMXMBean",
           jobCallbackMgr.getJmxJobCallbackMBean());
     }
@@ -434,24 +427,24 @@ public class AzkabanExecutorServer {
 
   public void close() {
     try {
-      for (ObjectName name : registeredMBeans) {
-        mbeanServer.unregisterMBean(name);
+      for (final ObjectName name : this.registeredMBeans) {
+        this.mbeanServer.unregisterMBean(name);
         logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered.");
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("Failed to cleanup MBeanServer", e);
     }
   }
 
-  private void registerMbean(String name, Object mbean) {
-    Class<?> mbeanClass = mbean.getClass();
-    ObjectName mbeanName;
+  private void registerMbean(final String name, final Object mbean) {
+    final Class<?> mbeanClass = mbean.getClass();
+    final ObjectName mbeanName;
     try {
       mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name);
-      mbeanServer.registerMBean(mbean, mbeanName);
+      this.mbeanServer.registerMBean(mbean, mbeanName);
       logger.info("Bean " + mbeanClass.getCanonicalName() + " registered.");
-      registeredMBeans.add(mbeanName);
-    } catch (Exception e) {
+      this.registeredMBeans.add(mbeanName);
+    } catch (final Exception e) {
       logger.error("Error registering mbean " + mbeanClass.getCanonicalName(),
           e);
     }
@@ -459,22 +452,22 @@ public class AzkabanExecutorServer {
   }
 
   public List<ObjectName> getMbeanNames() {
-    return registeredMBeans;
+    return this.registeredMBeans;
   }
 
-  public MBeanInfo getMBeanInfo(ObjectName name) {
+  public MBeanInfo getMBeanInfo(final ObjectName name) {
     try {
-      return mbeanServer.getMBeanInfo(name);
-    } catch (Exception e) {
+      return this.mbeanServer.getMBeanInfo(name);
+    } catch (final Exception e) {
       logger.error(e);
       return null;
     }
   }
 
-  public Object getMBeanAttribute(ObjectName name, String attribute) {
+  public Object getMBeanAttribute(final ObjectName name, final String attribute) {
     try {
-      return mbeanServer.getAttribute(name, attribute);
-    } catch (Exception e) {
+      return this.mbeanServer.getAttribute(name, attribute);
+    } catch (final Exception e) {
       logger.error(e);
       return null;
     }
@@ -487,9 +480,10 @@ public class AzkabanExecutorServer {
    * @return hostname
    */
   public String getHost() {
-    if(props.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_HOST_NAME)) {
-      String hostName = props.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_HOST_NAME);
-      if(!StringUtils.isEmpty(hostName)) {
+    if (this.props.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_HOST_NAME)) {
+      final String hostName = this.props
+          .getString(Constants.ConfigurationKeys.AZKABAN_SERVER_HOST_NAME);
+      if (!StringUtils.isEmpty(hostName)) {
         return hostName;
       }
     }
@@ -497,7 +491,7 @@ public class AzkabanExecutorServer {
     String host = "unkownHost";
     try {
       host = InetAddress.getLocalHost().getCanonicalHostName();
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error("Failed to fetch LocalHostName");
     }
     return host;
@@ -505,10 +499,11 @@ public class AzkabanExecutorServer {
 
   /**
    * Get the current server port
+   *
    * @return the port at which the executor server is running
    */
   public int getPort() {
-    final Connector[] connectors = server.getConnectors();
+    final Connector[] connectors = this.server.getConnectors();
     checkState(connectors.length >= 1, "Server must have at least 1 connector");
 
     // The first connector is created upon initializing the server. That's the one that has the port.
@@ -517,7 +512,6 @@ public class AzkabanExecutorServer {
 
   /**
    * Returns host:port combination for currently running executor
-   * @return
    */
   public String getExecutorHostPort() {
     return getHost() + ":" + getPort();
@@ -525,8 +519,8 @@ public class AzkabanExecutorServer {
 
   /**
    * Shutdown the server.
-   *  - performs a safe shutdown. Waits for completion of current tasks
-   *  - spawns a shutdown thread and returns immediately.
+   * - performs a safe shutdown. Waits for completion of current tasks
+   * - spawns a shutdown thread and returns immediately.
    */
   public void shutdown() {
     logger.warn("Shutting down AzkabanExecutorServer...");
@@ -546,8 +540,8 @@ public class AzkabanExecutorServer {
    * Note: This should be run in a separate thread.
    *
    * Shutdown the server. (blocking call)
-   *  - waits for jobs to finish
-   *  - doesn't accept any new jobs
+   * - waits for jobs to finish
+   * - doesn't accept any new jobs
    */
   private void shutdownInternal() {
     getFlowRunnerManager().shutdown();
@@ -557,11 +551,10 @@ public class AzkabanExecutorServer {
 
   /**
    * Shutdown the server now! (unsafe)
-   * @throws Exception
    */
   public void shutdownNow() throws Exception {
-    server.stop();
-    server.destroy();
+    this.server.stop();
+    this.server.destroy();
     getFlowRunnerManager().shutdownNow();
     close();
   }
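
The getHost() logic above prefers an explicitly configured server host name and falls back to the canonical name of the local host. Below is a minimal standalone sketch of that lookup order; the property key and class name are illustrative stand-ins rather than the actual Azkaban constants.

import java.net.InetAddress;
import java.util.Properties;

public class HostResolutionSketch {

  static String resolveHost(final Properties props) {
    // "azkaban.server.hostname" is an illustrative key; the real code reads
    // Constants.ConfigurationKeys.AZKABAN_SERVER_HOST_NAME from its Props object
    final String configured = props.getProperty("azkaban.server.hostname");
    if (configured != null && !configured.isEmpty()) {
      return configured;
    }
    try {
      return InetAddress.getLocalHost().getCanonicalHostName();
    } catch (final Exception e) {
      return "unknownHost"; // fallback when the local host name cannot be resolved
    }
  }

  public static void main(final String[] args) {
    System.out.println(resolveHost(new Properties()));
  }
}
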
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/BlockingStatus.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/BlockingStatus.java
index eb10a92..a472044 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/BlockingStatus.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/BlockingStatus.java
@@ -19,32 +19,33 @@ package azkaban.execapp.event;
 import azkaban.executor.Status;
 
 public class BlockingStatus {
+
   private static final long WAIT_TIME = 5 * 60 * 1000;
   private final int execId;
   private final String jobId;
   private Status status;
 
-  public BlockingStatus(int execId, String jobId, Status initialStatus) {
+  public BlockingStatus(final int execId, final String jobId, final Status initialStatus) {
     this.execId = execId;
     this.jobId = jobId;
     this.status = initialStatus;
   }
 
   public Status blockOnFinishedStatus() {
-    if (status == null) {
+    if (this.status == null) {
       return null;
     }
 
-    while (!Status.isStatusFinished(status)) {
+    while (!Status.isStatusFinished(this.status)) {
       synchronized (this) {
         try {
           this.wait(WAIT_TIME);
-        } catch (InterruptedException e) {
+        } catch (final InterruptedException e) {
         }
       }
     }
 
-    return status;
+    return this.status;
   }
 
   public Status viewStatus() {
@@ -57,7 +58,7 @@ public class BlockingStatus {
     }
   }
 
-  public void changeStatus(Status status) {
+  public void changeStatus(final Status status) {
     synchronized (this) {
       this.status = status;
       if (Status.isStatusFinished(status)) {
@@ -67,10 +68,10 @@ public class BlockingStatus {
   }
 
   public int getExecId() {
-    return execId;
+    return this.execId;
   }
 
   public String getJobId() {
-    return jobId;
+    return this.jobId;
   }
 }
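
BlockingStatus above is built on plain monitor wait/notify: a pipelined job blocks until the watched job's status becomes terminal, re-checking after a bounded wait. The following self-contained sketch shows the same pattern; the enum and the 5-second wait are stand-ins, not Azkaban types.

public class BlockingStatusSketch {

  enum Status { RUNNING, SUCCEEDED }

  private Status status = Status.RUNNING;

  synchronized Status blockOnFinished() throws InterruptedException {
    while (status == Status.RUNNING) {
      // bounded wait so the loop re-checks periodically, like WAIT_TIME in BlockingStatus
      wait(5000);
    }
    return status;
  }

  synchronized void changeStatus(final Status newStatus) {
    status = newStatus;
    notifyAll(); // wake any thread blocked in blockOnFinished()
  }

  public static void main(final String[] args) throws Exception {
    final BlockingStatusSketch block = new BlockingStatusSketch();
    new Thread(() -> {
      try { Thread.sleep(200); } catch (final InterruptedException ignored) { }
      block.changeStatus(Status.SUCCEEDED);
    }).start();
    System.out.println("finished with " + block.blockOnFinished());
  }
}
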
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/FlowWatcher.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/FlowWatcher.java
index 139cf61..3066ead 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/FlowWatcher.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/FlowWatcher.java
@@ -16,77 +16,73 @@
 
 package azkaban.execapp.event;
 
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.log4j.Logger;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableNode;
 import azkaban.executor.Status;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import org.apache.log4j.Logger;
 
 public abstract class FlowWatcher {
-  private Logger logger;
 
-  private int execId;
+  private final int execId;
+  private final Map<String, BlockingStatus> map =
+      new ConcurrentHashMap<>();
+  private Logger logger;
   private ExecutableFlow flow;
-  private Map<String, BlockingStatus> map =
-      new ConcurrentHashMap<String, BlockingStatus>();
   private boolean cancelWatch = false;
 
-  public FlowWatcher(int execId) {
+  public FlowWatcher(final int execId) {
     this.execId = execId;
   }
 
-  public void setFlow(ExecutableFlow flow) {
+  public void setFlow(final ExecutableFlow flow) {
     this.flow = flow;
   }
 
-  public void setLogger(Logger logger) {
-    this.logger = logger;
-  }
-
   protected Logger getLogger() {
     return this.logger;
   }
 
+  public void setLogger(final Logger logger) {
+    this.logger = logger;
+  }
+
   /**
    * Called to fire events to the JobRunner listeners
-   *
-   * @param jobId
    */
-  protected synchronized void handleJobStatusChange(String jobId, Status status) {
-    BlockingStatus block = map.get(jobId);
+  protected synchronized void handleJobStatusChange(final String jobId, final Status status) {
+    final BlockingStatus block = this.map.get(jobId);
     if (block != null) {
       block.changeStatus(status);
     }
   }
 
   public int getExecId() {
-    return execId;
+    return this.execId;
   }
 
-  public synchronized BlockingStatus getBlockingStatus(String jobId) {
-    if (cancelWatch) {
+  public synchronized BlockingStatus getBlockingStatus(final String jobId) {
+    if (this.cancelWatch) {
       return null;
     }
 
-    ExecutableNode node = flow.getExecutableNodePath(jobId);
+    final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
     if (node == null) {
       return null;
     }
 
-    BlockingStatus blockingStatus = map.get(jobId);
+    BlockingStatus blockingStatus = this.map.get(jobId);
     if (blockingStatus == null) {
-      blockingStatus = new BlockingStatus(execId, jobId, node.getStatus());
-      map.put(jobId, blockingStatus);
+      blockingStatus = new BlockingStatus(this.execId, jobId, node.getStatus());
+      this.map.put(jobId, blockingStatus);
     }
 
     return blockingStatus;
   }
 
-  public Status peekStatus(String jobId) {
-    ExecutableNode node = flow.getExecutableNodePath(jobId);
+  public Status peekStatus(final String jobId) {
+    final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
     if (node != null) {
       return node.getStatus();
     }
@@ -95,20 +91,20 @@ public abstract class FlowWatcher {
   }
 
   public synchronized void unblockAllWatches() {
-    logger.info("Unblock all watches on " + execId);
-    cancelWatch = true;
+    this.logger.info("Unblock all watches on " + this.execId);
+    this.cancelWatch = true;
 
-    for (BlockingStatus status : map.values()) {
-      logger.info("Unblocking " + status.getJobId());
+    for (final BlockingStatus status : this.map.values()) {
+      this.logger.info("Unblocking " + status.getJobId());
       status.changeStatus(Status.SKIPPED);
       status.unblock();
     }
 
-    logger.info("Successfully unblocked all watches on " + execId);
+    this.logger.info("Successfully unblocked all watches on " + this.execId);
   }
 
   public boolean isWatchCancelled() {
-    return cancelWatch;
+    return this.cancelWatch;
   }
 
   public abstract void stopWatcher();
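
A hedged usage sketch (not part of the diff) of how pipelined execution could combine the FlowWatcher and BlockingStatus APIs shown above; it assumes an already-constructed watcher such as LocalFlowWatcher or RemoteFlowWatcher.

import azkaban.execapp.event.BlockingStatus;
import azkaban.execapp.event.FlowWatcher;
import azkaban.executor.Status;

public class PipelineWaitSketch {

  static Status waitForUpstreamJob(final FlowWatcher watcher, final String jobId) {
    final BlockingStatus blockingStatus = watcher.getBlockingStatus(jobId);
    if (blockingStatus == null) {
      // watch was cancelled or the job does not exist in the watched flow
      return watcher.peekStatus(jobId);
    }
    // blocks until the watched job reaches a finished status (or the watch is unblocked)
    return blockingStatus.blockOnFinishedStatus();
  }
}
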
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackManager.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackManager.java
index 5662d03..7771b39 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackManager.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackManager.java
@@ -6,18 +6,6 @@ import static azkaban.jobcallback.JobCallbackStatusEnum.FAILURE;
 import static azkaban.jobcallback.JobCallbackStatusEnum.STARTED;
 import static azkaban.jobcallback.JobCallbackStatusEnum.SUCCESS;
 
-import java.net.InetAddress;
-import java.net.URL;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-import java.util.Map;
-import java.util.TimeZone;
-
-import org.apache.http.client.methods.HttpRequestBase;
-import org.apache.http.message.BasicHeader;
-import org.apache.log4j.Logger;
-
 import azkaban.event.Event;
 import azkaban.event.EventData;
 import azkaban.event.EventListener;
@@ -28,40 +16,64 @@ import azkaban.executor.Status;
 import azkaban.jobcallback.JobCallbackStatusEnum;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
+import java.net.InetAddress;
+import java.net.URL;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.TimeZone;
+import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.http.message.BasicHeader;
+import org.apache.log4j.Logger;
 
 /**
  * Responsible for processing job callback properties on job status change events.
- * 
+ *
  * When job callback properties are specified, they will be converted to HTTP
  * calls to execute. The HTTP requests will be made in asynchronous mode so the
  * caller to the handleEvent method will not be blocked. In addition, the HTTP
  * calls will be configured with appropriate timeouts for connection requests,
  * connection creation, and socket reads.
- * 
+ *
  * The HTTP request and response will be logged to the job's log for debugging
  * and traceability purposes.
- * 
- * @author hluu
  *
+ * @author hluu
  */
 public class JobCallbackManager implements EventListener {
 
   private static final Logger logger = Logger
       .getLogger(JobCallbackManager.class);
-
+  private static final JobCallbackStatusEnum[] ON_COMPLETION_JOB_CALLBACK_STATUS =
+      {SUCCESS, FAILURE, COMPLETED};
   private static boolean isInitialized = false;
   private static JobCallbackManager instance;
-
   private static int maxNumCallBack = 3;
+  private final JmxJobCallbackMBean callbackMbean;
+  private final String azkabanHostName;
+  private final SimpleDateFormat gmtDateFormatter;
 
-  private JmxJobCallbackMBean callbackMbean;
-  private String azkabanHostName;
-  private SimpleDateFormat gmtDateFormatter;
+  private JobCallbackManager(final Props props) {
+    maxNumCallBack = props.getInt("jobcallback.max_count", maxNumCallBack);
 
-  private static final JobCallbackStatusEnum[] ON_COMPLETION_JOB_CALLBACK_STATUS =
-      { SUCCESS, FAILURE, COMPLETED };
+    // initialize the request maker
+    JobCallbackRequestMaker.initialize(props);
+
+    this.callbackMbean =
+        new JmxJobCallback(JobCallbackRequestMaker.getInstance()
+            .getJobcallbackMetrics());
+
+    this.azkabanHostName = getAzkabanHostName(props);
+
+    this.gmtDateFormatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
+    this.gmtDateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));
+
+    logger.info("Initialization completed " + getClass().getName());
+    logger.info("azkabanHostName " + this.azkabanHostName);
+  }
 
-  public static void initialize(Props props) {
+  public static void initialize(final Props props) {
     if (isInitialized) {
       logger.info("Already initialized");
       return;
@@ -85,31 +97,12 @@ public class JobCallbackManager implements EventListener {
     return instance;
   }
 
-  private JobCallbackManager(Props props) {
-    maxNumCallBack = props.getInt("jobcallback.max_count", maxNumCallBack);
-
-    // initialize the request maker
-    JobCallbackRequestMaker.initialize(props);
-
-    callbackMbean =
-        new JmxJobCallback(JobCallbackRequestMaker.getInstance()
-            .getJobcallbackMetrics());
-
-    azkabanHostName = getAzkabanHostName(props);
-
-    gmtDateFormatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
-    gmtDateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));
-
-    logger.info("Initialization completed " + getClass().getName());
-    logger.info("azkabanHostName " + azkabanHostName);
-  }
-
   public JmxJobCallbackMBean getJmxJobCallbackMBean() {
-    return callbackMbean;
+    return this.callbackMbean;
   }
 
   @Override
-  public void handleEvent(Event event) {
+  public void handleEvent(final Event event) {
     if (!isInitialized) {
       return;
     }
@@ -121,9 +114,9 @@ public class JobCallbackManager implements EventListener {
         } else if (event.getType() == Event.Type.JOB_FINISHED) {
           processJobCallOnFinish(event);
         }
-      } catch (Throwable e) {
+      } catch (final Throwable e) {
         // Use job runner logger so user can see the issue in their job log
-        JobRunner jobRunner = (JobRunner) event.getRunner();
+        final JobRunner jobRunner = (JobRunner) event.getRunner();
         jobRunner.getLogger().error(
             "Encountered error while hanlding job callback event", e);
       }
@@ -134,9 +127,9 @@ public class JobCallbackManager implements EventListener {
 
   }
 
-  private void processJobCallOnFinish(Event event) {
-    JobRunner jobRunner = (JobRunner) event.getRunner();
-    EventData eventData = event.getData();
+  private void processJobCallOnFinish(final Event event) {
+    final JobRunner jobRunner = (JobRunner) event.getRunner();
+    final EventData eventData = event.getData();
 
     if (!JobCallbackUtil.isThereJobCallbackProperty(jobRunner.getProps(),
         ON_COMPLETION_JOB_CALLBACK_STATUS)) {
@@ -145,15 +138,15 @@ public class JobCallbackManager implements EventListener {
 
     // don't want to waste time resolving properties if there are no
     // callback properties to parse
-    Props props = PropsUtils.resolveProps(jobRunner.getProps());
+    final Props props = PropsUtils.resolveProps(jobRunner.getProps());
 
-    Map<String, String> contextInfo =
-        JobCallbackUtil.buildJobContextInfoMap(event, azkabanHostName);
+    final Map<String, String> contextInfo =
+        JobCallbackUtil.buildJobContextInfoMap(event, this.azkabanHostName);
 
     JobCallbackStatusEnum jobCallBackStatusEnum = null;
-    Logger jobLogger = jobRunner.getLogger();
+    final Logger jobLogger = jobRunner.getLogger();
 
-    Status jobStatus = eventData.getStatus();
+    final Status jobStatus = eventData.getStatus();
 
     if (jobStatus == Status.SUCCEEDED) {
 
@@ -169,15 +162,15 @@ public class JobCallbackManager implements EventListener {
       jobCallBackStatusEnum = null; // to be explicit
     }
 
-    String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
+    final String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
 
     if (jobCallBackStatusEnum != null) {
-      List<HttpRequestBase> jobCallbackHttpRequests =
+      final List<HttpRequestBase> jobCallbackHttpRequests =
           JobCallbackUtil.parseJobCallbackProperties(props,
               jobCallBackStatusEnum, contextInfo, maxNumCallBack, jobLogger);
 
       if (!jobCallbackHttpRequests.isEmpty()) {
-        String msg =
+        final String msg =
             String.format("Making %d job callbacks for status: %s",
                 jobCallbackHttpRequests.size(), jobCallBackStatusEnum.name());
         jobLogger.info(msg);
@@ -192,7 +185,7 @@ public class JobCallbackManager implements EventListener {
     }
 
     // for completed status
-    List<HttpRequestBase> httpRequestsForCompletedStatus =
+    final List<HttpRequestBase> httpRequestsForCompletedStatus =
         JobCallbackUtil.parseJobCallbackProperties(props, COMPLETED,
             contextInfo, maxNumCallBack, jobLogger);
 
@@ -209,25 +202,25 @@ public class JobCallbackManager implements EventListener {
     }
   }
 
-  private void processJobCallOnStart(Event event) {
-    JobRunner jobRunner = (JobRunner) event.getRunner();
+  private void processJobCallOnStart(final Event event) {
+    final JobRunner jobRunner = (JobRunner) event.getRunner();
 
     if (JobCallbackUtil.isThereJobCallbackProperty(jobRunner.getProps(),
         JobCallbackStatusEnum.STARTED)) {
 
       // don't want to waste time resolving properties if there are no
       // callback properties to parse
-      Props props = PropsUtils.resolveProps(jobRunner.getProps());
+      final Props props = PropsUtils.resolveProps(jobRunner.getProps());
 
-      Map<String, String> contextInfo =
-          JobCallbackUtil.buildJobContextInfoMap(event, azkabanHostName);
+      final Map<String, String> contextInfo =
+          JobCallbackUtil.buildJobContextInfoMap(event, this.azkabanHostName);
 
-      List<HttpRequestBase> jobCallbackHttpRequests =
+      final List<HttpRequestBase> jobCallbackHttpRequests =
           JobCallbackUtil.parseJobCallbackProperties(props, STARTED,
               contextInfo, maxNumCallBack, jobRunner.getLogger());
 
-      String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
-      String msg =
+      final String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
+      final String msg =
           String.format("Making %d job callbacks for job %s for jobStatus: %s",
               jobCallbackHttpRequests.size(), jobId, STARTED.name());
 
@@ -240,34 +233,34 @@ public class JobCallbackManager implements EventListener {
     }
   }
 
-  private String getAzkabanHostName(Props props) {
-    String baseURL = props.get(JobRunner.AZKABAN_WEBSERVER_URL);
+  private String getAzkabanHostName(final Props props) {
+    final String baseURL = props.get(JobRunner.AZKABAN_WEBSERVER_URL);
     try {
       String hostName = InetAddress.getLocalHost().getHostName();
       if (baseURL != null) {
-        URL url = new URL(baseURL);
+        final URL url = new URL(baseURL);
         hostName = url.getHost() + ":" + url.getPort();
       }
       return hostName;
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new IllegalStateException(
           "Encountered while getting azkaban host name", e);
     }
   }
 
-  private void addDefaultHeaders(List<HttpRequestBase> httpRequests) {
+  private void addDefaultHeaders(final List<HttpRequestBase> httpRequests) {
     if (httpRequests == null) {
       return;
     }
 
-    SimpleDateFormat format =
+    final SimpleDateFormat format =
         new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
     format.setTimeZone(TimeZone.getTimeZone("GMT"));
 
-    for (HttpRequestBase httpRequest : httpRequests) {
-      httpRequest.addHeader(new BasicHeader("Date", gmtDateFormatter
+    for (final HttpRequestBase httpRequest : httpRequests) {
+      httpRequest.addHeader(new BasicHeader("Date", this.gmtDateFormatter
           .format(new Date())));
-      httpRequest.addHeader(new BasicHeader("Host", azkabanHostName));
+      httpRequest.addHeader(new BasicHeader("Host", this.azkabanHostName));
     }
 
   }
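
addDefaultHeaders(...) above stamps each callback request with a GMT "Date" header and a "Host" header. Below is a standalone sketch of just that header logic using the same HttpClient types; the URL and host value are placeholders.

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.message.BasicHeader;

public class CallbackHeaderSketch {

  public static void main(final String[] args) {
    // same pattern and time zone as the gmtDateFormatter field above
    final SimpleDateFormat gmtDateFormatter =
        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
    gmtDateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));

    final HttpGet request = new HttpGet("http://callback.example.com/notify");
    request.addHeader(new BasicHeader("Date", gmtDateFormatter.format(new Date())));
    request.addHeader(new BasicHeader("Host", "azkaban-exec.example.com:12321"));

    System.out.println(request.getFirstHeader("Date"));
  }
}
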
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackRequestMaker.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackRequestMaker.java
index 2a84151..070856b 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackRequestMaker.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackRequestMaker.java
@@ -1,11 +1,12 @@
 package azkaban.execapp.event;
 
-import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_TIMEOUT;
 import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT;
+import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_TIMEOUT;
 import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_RESPONSE_WAIT_TIMEOUT;
 import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_SOCKET_TIMEOUT;
 import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_THREAD_POOL_SIZE;
 
+import azkaban.utils.Props;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
@@ -16,7 +17,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-
 import org.apache.http.Header;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
@@ -32,8 +32,6 @@ import org.apache.http.impl.client.HttpClientBuilder;
 import org.apache.http.impl.client.HttpRequestFutureTask;
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-
 /**
  * Responsible for making the job callback HTTP requests.
  *
@@ -41,7 +39,6 @@ import azkaban.utils.Props;
  * using the given logger, which should be the job logger.
  *
  * @author hluu
- *
  */
 public class JobCallbackRequestMaker {
 
@@ -57,10 +54,46 @@ public class JobCallbackRequestMaker {
   private static JobCallbackRequestMaker instance;
   private static boolean isInitialized = false;
 
-  private FutureRequestExecutionService futureRequestExecutionService;
+  private final FutureRequestExecutionService futureRequestExecutionService;
   private int responseWaitTimeoutMS = -1;
 
-  public static void initialize(Props props) {
+  private JobCallbackRequestMaker(final Props props) {
+
+    final int connectionRequestTimeout =
+        props.getInt(JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT, DEFAULT_TIME_OUT_MS);
+
+    final int connectionTimeout = props.getInt(JOBCALLBACK_CONNECTION_TIMEOUT, DEFAULT_TIME_OUT_MS);
+
+    final int socketTimeout = props.getInt(JOBCALLBACK_SOCKET_TIMEOUT, DEFAULT_TIME_OUT_MS);
+
+    this.responseWaitTimeoutMS =
+        props.getInt(JOBCALLBACK_RESPONSE_WAIT_TIMEOUT, DEFAULT_RESPONSE_WAIT_TIME_OUT_MS);
+
+    logger.info("responseWaitTimeoutMS: " + this.responseWaitTimeoutMS);
+
+    final RequestConfig requestConfig =
+        RequestConfig.custom()
+            .setConnectionRequestTimeout(connectionRequestTimeout)
+            .setConnectTimeout(connectionTimeout)
+            .setSocketTimeout(socketTimeout).build();
+
+    logger.info("Global request configuration " + requestConfig.toString());
+
+    final HttpClient httpClient =
+        HttpClientBuilder.create().setDefaultRequestConfig(requestConfig)
+            .build();
+
+    final int jobCallbackThreadPoolSize =
+        props.getInt(JOBCALLBACK_THREAD_POOL_SIZE, DEFAULT_THREAD_POOL_SIZE);
+    logger.info("Jobcall thread pool size: " + jobCallbackThreadPoolSize);
+
+    final ExecutorService executorService =
+        Executors.newFixedThreadPool(jobCallbackThreadPoolSize);
+    this.futureRequestExecutionService =
+        new FutureRequestExecutionService(httpClient, executorService);
+  }
+
+  public static void initialize(final Props props) {
     if (props == null) {
       throw new NullPointerException("props argument can't be null");
     }
@@ -87,89 +120,53 @@ public class JobCallbackRequestMaker {
     return instance;
   }
 
-  private JobCallbackRequestMaker(Props props) {
-
-    int connectionRequestTimeout =
-        props.getInt(JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT, DEFAULT_TIME_OUT_MS);
-
-    int connectionTimeout = props.getInt(JOBCALLBACK_CONNECTION_TIMEOUT, DEFAULT_TIME_OUT_MS);
-
-    int socketTimeout = props.getInt(JOBCALLBACK_SOCKET_TIMEOUT, DEFAULT_TIME_OUT_MS);
-
-    responseWaitTimeoutMS =
-        props.getInt(JOBCALLBACK_RESPONSE_WAIT_TIMEOUT, DEFAULT_RESPONSE_WAIT_TIME_OUT_MS);
-
-    logger.info("responseWaitTimeoutMS: " + responseWaitTimeoutMS);
-
-    RequestConfig requestConfig =
-        RequestConfig.custom()
-            .setConnectionRequestTimeout(connectionRequestTimeout)
-            .setConnectTimeout(connectionTimeout)
-            .setSocketTimeout(socketTimeout).build();
-
-    logger.info("Global request configuration " + requestConfig.toString());
-
-    HttpClient httpClient =
-        HttpClientBuilder.create().setDefaultRequestConfig(requestConfig)
-            .build();
-
-    int jobCallbackThreadPoolSize =
-        props.getInt(JOBCALLBACK_THREAD_POOL_SIZE, DEFAULT_THREAD_POOL_SIZE);
-    logger.info("Jobcall thread pool size: " + jobCallbackThreadPoolSize);
-
-    ExecutorService executorService =
-        Executors.newFixedThreadPool(jobCallbackThreadPoolSize);
-    futureRequestExecutionService =
-        new FutureRequestExecutionService(httpClient, executorService);
-  }
-
   public FutureRequestExecutionMetrics getJobcallbackMetrics() {
-    return futureRequestExecutionService.metrics();
+    return this.futureRequestExecutionService.metrics();
   }
 
-  public void makeHttpRequest(String jobId, Logger logger,
-      List<HttpRequestBase> httpRequestList) {
+  public void makeHttpRequest(final String jobId, final Logger logger,
+      final List<HttpRequestBase> httpRequestList) {
 
     if (httpRequestList == null || httpRequestList.isEmpty()) {
       logger.info("No HTTP requests to make");
       return;
     }
 
-    for (HttpRequestBase httpRequest : httpRequestList) {
+    for (final HttpRequestBase httpRequest : httpRequestList) {
 
       logger.info("Job callback http request: " + httpRequest.toString());
       logger.info("headers [");
-      for (Header header : httpRequest.getAllHeaders()) {
+      for (final Header header : httpRequest.getAllHeaders()) {
         logger.info(String.format("  %s : %s", header.getName(),
             header.getValue()));
       }
       logger.info("]");
 
-      HttpRequestFutureTask<Integer> task =
-          futureRequestExecutionService.execute(httpRequest,
+      final HttpRequestFutureTask<Integer> task =
+          this.futureRequestExecutionService.execute(httpRequest,
               HttpClientContext.create(), new LoggingResponseHandler(logger));
 
       try {
         // get with timeout
-        Integer statusCode =
-            task.get(responseWaitTimeoutMS, TimeUnit.MILLISECONDS);
+        final Integer statusCode =
+            task.get(this.responseWaitTimeoutMS, TimeUnit.MILLISECONDS);
 
         logger.info("http callback status code: " + statusCode);
-      } catch (TimeoutException timeOutEx) {
+      } catch (final TimeoutException timeOutEx) {
         logger
             .warn("Job callback target took longer "
-                + (responseWaitTimeoutMS / 1000) + " seconds to respond",
+                    + (this.responseWaitTimeoutMS / 1000) + " seconds to respond",
                 timeOutEx);
-      } catch (ExecutionException ee) {
+      } catch (final ExecutionException ee) {
         if (ee.getCause() instanceof SocketTimeoutException) {
           logger.warn("Job callback target took longer "
-              + (responseWaitTimeoutMS / 1000) + " seconds to respond", ee);
+              + (this.responseWaitTimeoutMS / 1000) + " seconds to respond", ee);
         } else {
           logger.warn(
               "Encountered error while waiting for job callback to complete",
               ee);
         }
-      } catch (Throwable e) {
+      } catch (final Throwable e) {
         logger.warn(
             "Encountered error while waiting for job callback to complete", e);
       }
@@ -181,14 +178,13 @@ public class JobCallbackRequestMaker {
    * instance
    *
    * @author hluu
-   *
    */
   private static final class LoggingResponseHandler implements
       ResponseHandler<Integer> {
 
-    private Logger logger;
+    private final Logger logger;
 
-    public LoggingResponseHandler(Logger logger) {
+    public LoggingResponseHandler(final Logger logger) {
       if (logger == null) {
         throw new NullPointerException("Argument logger can't be null");
       }
@@ -199,11 +195,11 @@ public class JobCallbackRequestMaker {
     public Integer handleResponse(final HttpResponse response)
         throws ClientProtocolException, IOException {
 
-      int statusCode = response.getStatusLine().getStatusCode();
+      final int statusCode = response.getStatusLine().getStatusCode();
       BufferedReader bufferedReader = null;
 
       try {
-        HttpEntity responseEntity = response.getEntity();
+        final HttpEntity responseEntity = response.getEntity();
         if (responseEntity != null) {
           bufferedReader =
               new BufferedReader(new InputStreamReader(
@@ -211,27 +207,27 @@ public class JobCallbackRequestMaker {
 
           String line = "";
           int lineCount = 0;
-          logger.info("HTTP response [");
+          this.logger.info("HTTP response [");
           while ((line = bufferedReader.readLine()) != null) {
-            logger.info(line);
+            this.logger.info(line);
             lineCount++;
             if (lineCount > MAX_RESPONSE_LINE_TO_PRINT) {
               break;
             }
           }
-          logger.info("]");
+          this.logger.info("]");
         } else {
-          logger.info("No response");
+          this.logger.info("No response");
         }
 
-      } catch (Throwable t) {
-        logger.warn(
+      } catch (final Throwable t) {
+        this.logger.warn(
             "Encountered error while logging out job callback response", t);
       } finally {
         if (bufferedReader != null) {
           try {
             bufferedReader.close();
-          } catch (IOException ex) {
+          } catch (final IOException ex) {
             // don't care
           }
         }
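
The constructor above wires RequestConfig timeouts, an HttpClient, and a fixed thread pool into a FutureRequestExecutionService so callbacks run asynchronously and are awaited with a bounded timeout. Here is a hedged, self-contained sketch of that pattern; the URL, pool size, and timeout values are illustrative.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.FutureRequestExecutionService;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpRequestFutureTask;

public class AsyncCallbackSketch {

  public static void main(final String[] args) throws Exception {
    // bound connection-request, connect, and socket waits, as the constructor above does
    final RequestConfig requestConfig = RequestConfig.custom()
        .setConnectionRequestTimeout(3000)
        .setConnectTimeout(3000)
        .setSocketTimeout(3000)
        .build();

    final HttpClient httpClient =
        HttpClientBuilder.create().setDefaultRequestConfig(requestConfig).build();
    final ExecutorService executorService = Executors.newFixedThreadPool(10);
    final FutureRequestExecutionService service =
        new FutureRequestExecutionService(httpClient, executorService);

    try {
      final HttpRequestFutureTask<Integer> task = service.execute(
          new HttpGet("http://callback.example.com/notify"),
          HttpClientContext.create(),
          response -> response.getStatusLine().getStatusCode());

      // block for the status code, but never longer than the response-wait timeout
      System.out.println("callback status: " + task.get(5, TimeUnit.SECONDS));
    } finally {
      service.close();
      executorService.shutdown();
    }
  }
}
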
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackUtil.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackUtil.java
index e06a306..9d74d72 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackUtil.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/JobCallbackUtil.java
@@ -1,8 +1,12 @@
 package azkaban.execapp.event;
 
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_EXECUTION_ID_TOKEN;
-import static azkaban.jobcallback.JobCallbackConstants.FIRST_JOB_CALLBACK_URL_TEMPLATE;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_FLOW_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_STATUS_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_PROJECT_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_SERVER_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.FIRST_JOB_CALLBACK_URL_TEMPLATE;
 import static azkaban.jobcallback.JobCallbackConstants.HEADER_ELEMENT_DELIMITER;
 import static azkaban.jobcallback.JobCallbackConstants.HEADER_NAME_VALUE_DELIMITER;
 import static azkaban.jobcallback.JobCallbackConstants.HTTP_GET;
@@ -11,13 +15,15 @@ import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_BODY_TEMPLAT
 import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_REQUEST_HEADERS_TEMPLATE;
 import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_REQUEST_METHOD_TEMPLATE;
 import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_URL_TEMPLATE;
-import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_STATUS_TOKEN;
-import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
-import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_PROJECT_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.SEQUENCE_TOKEN;
-import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_SERVER_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.STATUS_TOKEN;
 
+import azkaban.event.Event;
+import azkaban.event.EventData;
+import azkaban.execapp.JobRunner;
+import azkaban.executor.ExecutableNode;
+import azkaban.jobcallback.JobCallbackStatusEnum;
+import azkaban.utils.Props;
 import java.io.UnsupportedEncodingException;
 import java.net.URLEncoder;
 import java.util.ArrayList;
@@ -26,7 +32,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.regex.Pattern;
-
 import org.apache.http.Header;
 import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPost;
@@ -35,22 +40,16 @@ import org.apache.http.entity.StringEntity;
 import org.apache.http.message.BasicHeader;
 import org.apache.log4j.Logger;
 
-import azkaban.event.Event;
-import azkaban.event.EventData;
-import azkaban.execapp.JobRunner;
-import azkaban.executor.ExecutableNode;
-import azkaban.jobcallback.JobCallbackStatusEnum;
-import azkaban.utils.Props;
-
 public class JobCallbackUtil {
+
   private static final Logger logger = Logger.getLogger(JobCallbackUtil.class);
 
-  private static Map<JobCallbackStatusEnum, String> firstJobcallbackPropertyMap =
-      new HashMap<JobCallbackStatusEnum, String>(
+  private static final Map<JobCallbackStatusEnum, String> firstJobcallbackPropertyMap =
+      new HashMap<>(
           JobCallbackStatusEnum.values().length);
 
   static {
-    for (JobCallbackStatusEnum statusEnum : JobCallbackStatusEnum.values()) {
+    for (final JobCallbackStatusEnum statusEnum : JobCallbackStatusEnum.values()) {
       firstJobcallbackPropertyMap.put(statusEnum,
           replaceStatusToken(FIRST_JOB_CALLBACK_URL_TEMPLATE, statusEnum));
     }
@@ -59,30 +58,28 @@ public class JobCallbackUtil {
   /**
    * Used to quickly determine if there is a job callback related property in the
    * Props.
-   * 
-   * @param props
-   * @param status
+   *
    * @return true if there is job callback related property
    */
-  public static boolean isThereJobCallbackProperty(Props props,
-      JobCallbackStatusEnum status) {
+  public static boolean isThereJobCallbackProperty(final Props props,
+      final JobCallbackStatusEnum status) {
 
     if (props == null || status == null) {
       throw new NullPointerException("One of the arguments is null");
     }
 
-    String jobCallBackUrl = firstJobcallbackPropertyMap.get(status);
+    final String jobCallBackUrl = firstJobcallbackPropertyMap.get(status);
     return props.containsKey(jobCallBackUrl);
   }
 
-  public static boolean isThereJobCallbackProperty(Props props,
-      JobCallbackStatusEnum... jobStatuses) {
+  public static boolean isThereJobCallbackProperty(final Props props,
+      final JobCallbackStatusEnum... jobStatuses) {
 
     if (props == null || jobStatuses == null) {
       throw new NullPointerException("One of the arguments is null");
     }
 
-    for (JobCallbackStatusEnum jobStatus : jobStatuses) {
+    for (final JobCallbackStatusEnum jobStatus : jobStatuses) {
       if (JobCallbackUtil.isThereJobCallbackProperty(props, jobStatus)) {
         return true;
       }
@@ -90,9 +87,9 @@ public class JobCallbackUtil {
     return false;
   }
 
-  public static List<HttpRequestBase> parseJobCallbackProperties(Props props,
-      JobCallbackStatusEnum status, Map<String, String> contextInfo,
-      int maxNumCallback) {
+  public static List<HttpRequestBase> parseJobCallbackProperties(final Props props,
+      final JobCallbackStatusEnum status, final Map<String, String> contextInfo,
+      final int maxNumCallback) {
 
     return parseJobCallbackProperties(props, status, contextInfo,
         maxNumCallback, logger);
@@ -101,17 +98,14 @@ public class JobCallbackUtil {
   /**
    * This method is responsible for parsing job callback URL properties and converting
    * them into a list of HttpRequestBase objects, which callers can then execute.
-   * 
+   *
    * In addition to parsing, it will also replace the tokens with actual values.
-   * 
-   * @param props
-   * @param status
-   * @param event
+   *
    * @return List<HttpRequestBase> - empty if no job callback related properties
    */
-  public static List<HttpRequestBase> parseJobCallbackProperties(Props props,
-      JobCallbackStatusEnum status, Map<String, String> contextInfo,
-      int maxNumCallback, Logger privateLogger) {
+  public static List<HttpRequestBase> parseJobCallbackProperties(final Props props,
+      final JobCallbackStatusEnum status, final Map<String, String> contextInfo,
+      final int maxNumCallback, final Logger privateLogger) {
     String callbackUrl = null;
 
     if (!isThereJobCallbackProperty(props, status)) {
@@ -119,25 +113,25 @@ public class JobCallbackUtil {
       return Collections.emptyList();
     }
 
-    List<HttpRequestBase> result = new ArrayList<HttpRequestBase>();
+    final List<HttpRequestBase> result = new ArrayList<>();
 
     // replace property templates with status
-    String jobCallBackUrlKey =
+    final String jobCallBackUrlKey =
         replaceStatusToken(JOB_CALLBACK_URL_TEMPLATE, status);
 
-    String requestMethod =
+    final String requestMethod =
         replaceStatusToken(JOB_CALLBACK_REQUEST_METHOD_TEMPLATE, status);
 
-    String httpBodyKey = replaceStatusToken(JOB_CALLBACK_BODY_TEMPLATE, status);
+    final String httpBodyKey = replaceStatusToken(JOB_CALLBACK_BODY_TEMPLATE, status);
 
-    String headersKey =
+    final String headersKey =
         replaceStatusToken(JOB_CALLBACK_REQUEST_HEADERS_TEMPLATE, status);
 
     for (int sequence = 1; sequence <= maxNumCallback; sequence++) {
       HttpRequestBase httpRequest = null;
-      String sequenceStr = Integer.toString(sequence);
+      final String sequenceStr = Integer.toString(sequence);
       // callback url
-      String callbackUrlKey =
+      final String callbackUrlKey =
           jobCallBackUrlKey.replace(SEQUENCE_TOKEN, sequenceStr);
 
       callbackUrl = props.get(callbackUrlKey);
@@ -145,17 +139,17 @@ public class JobCallbackUtil {
         // nothing more needs to be done
         break;
       } else {
-        String callbackUrlWithTokenReplaced =
+        final String callbackUrlWithTokenReplaced =
             replaceTokens(callbackUrl, contextInfo, true);
 
-        String requestMethodKey =
+        final String requestMethodKey =
             requestMethod.replace(SEQUENCE_TOKEN, sequenceStr);
 
-        String method = props.getString(requestMethodKey, HTTP_GET);
+        final String method = props.getString(requestMethodKey, HTTP_GET);
 
         if (HTTP_POST.equals(method)) {
-          String postBodyKey = httpBodyKey.replace(SEQUENCE_TOKEN, sequenceStr);
-          String httpBodyValue = props.get(postBodyKey);
+          final String postBodyKey = httpBodyKey.replace(SEQUENCE_TOKEN, sequenceStr);
+          final String httpBodyValue = props.get(postBodyKey);
           if (httpBodyValue == null) {
             // missing body for POST, not good
             // update the wiki about skipping callback url if body is missing
@@ -164,8 +158,8 @@ public class JobCallbackUtil {
                 + contextInfo.get(CONTEXT_JOB_TOKEN));
           } else {
             // put together an URL
-            HttpPost httpPost = new HttpPost(callbackUrlWithTokenReplaced);
-            String postActualBody =
+            final HttpPost httpPost = new HttpPost(callbackUrlWithTokenReplaced);
+            final String postActualBody =
                 replaceTokens(httpBodyValue, contextInfo, false);
             privateLogger.info("postActualBody: " + postActualBody);
             httpPost.setEntity(createStringEntity(postActualBody));
@@ -179,11 +173,11 @@ public class JobCallbackUtil {
               + ". Only POST and GET are supported");
         }
 
-        String headersKeyPerSequence =
+        final String headersKeyPerSequence =
             headersKey.replace(SEQUENCE_TOKEN, sequenceStr);
-        String headersValue = props.get(headersKeyPerSequence);
+        final String headersValue = props.get(headersKeyPerSequence);
         privateLogger.info("headers: " + headersValue);
-        Header[] headers = parseHttpHeaders(headersValue);
+        final Header[] headers = parseHttpHeaders(headersValue);
         if (headers != null) {
           httpRequest.setHeaders(headers);
           privateLogger.info("# of headers found: " + headers.length);
@@ -196,20 +190,19 @@ public class JobCallbackUtil {
 
   /**
    * Parse headers
-   * 
-   * @param headers
+   *
    * @return null if headers is null or empty
    */
-  public static Header[] parseHttpHeaders(String headers) {
+  public static Header[] parseHttpHeaders(final String headers) {
     if (headers == null || headers.length() == 0) {
       return null;
     }
 
-    String[] headerArray = headers.split(HEADER_ELEMENT_DELIMITER);
-    List<Header> headerList = new ArrayList<Header>(headerArray.length);
+    final String[] headerArray = headers.split(HEADER_ELEMENT_DELIMITER);
+    final List<Header> headerList = new ArrayList<>(headerArray.length);
     for (int i = 0; i < headerArray.length; i++) {
-      String headerPair = headerArray[i];
-      int index = headerPair.indexOf(HEADER_NAME_VALUE_DELIMITER);
+      final String headerPair = headerArray[i];
+      final int index = headerPair.indexOf(HEADER_NAME_VALUE_DELIMITER);
       if (index != -1) {
         headerList.add(new BasicHeader(headerPair.substring(0, index),
             headerPair.substring(index + 1)));
@@ -219,15 +212,15 @@ public class JobCallbackUtil {
     return headerList.toArray(new BasicHeader[0]);
   }
 
-  private static String replaceStatusToken(String template,
-      JobCallbackStatusEnum status) {
+  private static String replaceStatusToken(final String template,
+      final JobCallbackStatusEnum status) {
     return template.replaceFirst(STATUS_TOKEN, status.name().toLowerCase());
   }
 
-  private static StringEntity createStringEntity(String str) {
+  private static StringEntity createStringEntity(final String str) {
     try {
       return new StringEntity(str);
-    } catch (UnsupportedEncodingException e) {
+    } catch (final UnsupportedEncodingException e) {
       throw new RuntimeException("Encoding not supported", e);
     }
   }
@@ -235,24 +228,23 @@ public class JobCallbackUtil {
   /**
    * This method takes the job context info and puts the values into a map with
    * keys as the tokens.
-   * 
-   * @param event
+   *
    * @return Map<String,String>
    */
-  public static Map<String, String> buildJobContextInfoMap(Event event,
-      String server) {
+  public static Map<String, String> buildJobContextInfoMap(final Event event,
+      final String server) {
 
     if (event.getRunner() instanceof JobRunner) {
-      JobRunner jobRunner = (JobRunner) event.getRunner();
-      ExecutableNode node = jobRunner.getNode();
-      EventData eventData = event.getData();
-      String projectName = node.getParentFlow().getProjectName();
-      String flowName = node.getParentFlow().getFlowId();
-      String executionId =
+      final JobRunner jobRunner = (JobRunner) event.getRunner();
+      final ExecutableNode node = jobRunner.getNode();
+      final EventData eventData = event.getData();
+      final String projectName = node.getParentFlow().getProjectName();
+      final String flowName = node.getParentFlow().getFlowId();
+      final String executionId =
           String.valueOf(node.getParentFlow().getExecutionId());
-      String jobId = node.getId();
+      final String jobId = node.getId();
 
-      Map<String, String> result = new HashMap<String, String>();
+      final Map<String, String> result = new HashMap<>();
       result.put(CONTEXT_SERVER_TOKEN, server);
       result.put(CONTEXT_PROJECT_TOKEN, projectName);
       result.put(CONTEXT_FLOW_TOKEN, flowName);
@@ -277,14 +269,12 @@ public class JobCallbackUtil {
   /**
    * Replace the supported tokens in the URL with values in the contextInfo.
    * This will also make sure the values are HTTP encoded.
-   * 
-   * @param value
-   * @param contextInfo
+   *
    * @param withEncoding - whether the token values will be HTTP encoded
    * @return String - value with tokens replaced with values
    */
-  public static String replaceTokens(String value,
-      Map<String, String> contextInfo, boolean withEncoding) {
+  public static String replaceTokens(final String value,
+      final Map<String, String> contextInfo, final boolean withEncoding) {
 
     String result = value;
     String tokenValue =
@@ -312,13 +302,13 @@ public class JobCallbackUtil {
     return result;
   }
 
-  private static String encodeQueryParam(String str, boolean withEncoding) {
+  private static String encodeQueryParam(final String str, final boolean withEncoding) {
     if (!withEncoding) {
       return str;
     }
     try {
       return URLEncoder.encode(str, "UTF-8");
-    } catch (UnsupportedEncodingException e) {
+    } catch (final UnsupportedEncodingException e) {
       throw new IllegalArgumentException(
           "Encountered problem during encoding:", e);
     }
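
replaceTokens(...) and encodeQueryParam(...) above splice context values into the callback URL after URL-encoding them. A minimal standalone sketch of that substitution follows; the "${...}" token syntax is illustrative, since the real token constants live in JobCallbackConstants.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;

public class TokenReplaceSketch {

  static String replaceTokens(final String template, final Map<String, String> context)
      throws UnsupportedEncodingException {
    String result = template;
    for (final Map.Entry<String, String> entry : context.entrySet()) {
      // encode each value so it is safe to embed in a query string
      result = result.replace(entry.getKey(), URLEncoder.encode(entry.getValue(), "UTF-8"));
    }
    return result;
  }

  public static void main(final String[] args) throws Exception {
    final Map<String, String> context = new HashMap<>();
    context.put("${project}", "my project");
    context.put("${status}", "succeeded");
    System.out.println(replaceTokens(
        "http://callback.example.com/notify?p=${project}&s=${status}", context));
  }
}
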
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
index a5776f2..1376249 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/LocalFlowWatcher.java
@@ -25,48 +25,50 @@ import azkaban.execapp.JobRunner;
 import azkaban.executor.ExecutableNode;
 
 public class LocalFlowWatcher extends FlowWatcher {
-  private LocalFlowWatcherListener watcherListener;
+
+  private final LocalFlowWatcherListener watcherListener;
   private FlowRunner runner;
   private boolean isShutdown = false;
 
-  public LocalFlowWatcher(FlowRunner runner) {
+  public LocalFlowWatcher(final FlowRunner runner) {
     super(runner.getExecutableFlow().getExecutionId());
     super.setFlow(runner.getExecutableFlow());
 
-    watcherListener = new LocalFlowWatcherListener();
+    this.watcherListener = new LocalFlowWatcherListener();
     this.runner = runner;
-    runner.addListener(watcherListener);
+    runner.addListener(this.watcherListener);
   }
 
   @Override
   public void stopWatcher() {
     // Just freeing stuff
-    if (isShutdown) {
+    if (this.isShutdown) {
       return;
     }
 
-    isShutdown = true;
-    runner.removeListener(watcherListener);
-    runner = null;
+    this.isShutdown = true;
+    this.runner.removeListener(this.watcherListener);
+    this.runner = null;
 
     getLogger().info("Stopping watcher, and unblocking pipeline");
     super.unblockAllWatches();
   }
 
   public class LocalFlowWatcherListener implements EventListener {
+
     @Override
-    public void handleEvent(Event event) {
+    public void handleEvent(final Event event) {
       if (event.getType() == Type.JOB_FINISHED) {
         if (event.getRunner() instanceof FlowRunner) {
           // The flow runner will finish a job without it running
-          EventData eventData = event.getData();
+          final EventData eventData = event.getData();
           if (eventData.getNestedId() != null) {
             handleJobStatusChange(eventData.getNestedId(), eventData.getStatus());
           }
         } else if (event.getRunner() instanceof JobRunner) {
           // A job runner is finished
-          JobRunner runner = (JobRunner) event.getRunner();
-          ExecutableNode node = runner.getNode();
+          final JobRunner runner = (JobRunner) event.getRunner();
+          final ExecutableNode node = runner.getNode();
           System.out.println(node + " looks like " + node.getStatus());
           handleJobStatusChange(node.getNestedId(), node.getStatus());
         }
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java b/azkaban-exec-server/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
index 9dd50c3..2c41216 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/event/RemoteFlowWatcher.java
@@ -16,16 +16,16 @@
 
 package azkaban.execapp.event;
 
-import java.util.ArrayList;
-import java.util.Map;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableNode;
 import azkaban.executor.ExecutorLoader;
 import azkaban.executor.ExecutorManagerException;
 import azkaban.executor.Status;
+import java.util.ArrayList;
+import java.util.Map;
 
 public class RemoteFlowWatcher extends FlowWatcher {
+
   private final static long CHECK_INTERVAL_MS = 60 * 1000;
 
   private int execId;
@@ -37,89 +37,91 @@ public class RemoteFlowWatcher extends FlowWatcher {
   // Every minute
   private long checkIntervalMs = CHECK_INTERVAL_MS;
 
-  public RemoteFlowWatcher(int execId, ExecutorLoader loader) {
+  public RemoteFlowWatcher(final int execId, final ExecutorLoader loader) {
     this(execId, loader, CHECK_INTERVAL_MS);
   }
 
-  public RemoteFlowWatcher(int execId, ExecutorLoader loader, long interval) {
+  public RemoteFlowWatcher(final int execId, final ExecutorLoader loader, final long interval) {
     super(execId);
-    checkIntervalMs = interval;
+    this.checkIntervalMs = interval;
 
     try {
-      flow = loader.fetchExecutableFlow(execId);
-    } catch (ExecutorManagerException e) {
+      this.flow = loader.fetchExecutableFlow(execId);
+    } catch (final ExecutorManagerException e) {
       return;
     }
 
-    super.setFlow(flow);
+    super.setFlow(this.flow);
     this.loader = loader;
     this.execId = execId;
-    if (flow != null) {
+    if (this.flow != null) {
       this.thread = new RemoteUpdaterThread();
       this.thread.setName("Remote-watcher-flow-" + execId);
       this.thread.start();
     }
   }
 
+  @Override
+  public synchronized void stopWatcher() {
+    if (this.isShutdown) {
+      return;
+    }
+    this.isShutdown = true;
+    if (this.thread != null) {
+      this.thread.interrupt();
+    }
+    super.unblockAllWatches();
+    this.loader = null;
+    this.flow = null;
+  }
+
   private class RemoteUpdaterThread extends Thread {
+
     @Override
     public void run() {
       do {
         ExecutableFlow updateFlow = null;
         try {
-          updateFlow = loader.fetchExecutableFlow(execId);
-        } catch (ExecutorManagerException e) {
+          updateFlow = RemoteFlowWatcher.this.loader.fetchExecutableFlow(
+              RemoteFlowWatcher.this.execId);
+        } catch (final ExecutorManagerException e) {
           e.printStackTrace();
-          isShutdown = true;
+          RemoteFlowWatcher.this.isShutdown = true;
         }
 
         long updateTime = 0;
-        if (flow == null) {
-          flow = updateFlow;
+        if (RemoteFlowWatcher.this.flow == null) {
+          RemoteFlowWatcher.this.flow = updateFlow;
         } else {
-          Map<String, Object> updateData =
+          final Map<String, Object> updateData =
               updateFlow.toUpdateObject(updateTime);
-          ArrayList<ExecutableNode> updatedNodes =
-              new ArrayList<ExecutableNode>();
-          flow.applyUpdateObject(updateData, updatedNodes);
+          final ArrayList<ExecutableNode> updatedNodes =
+              new ArrayList<>();
+          RemoteFlowWatcher.this.flow.applyUpdateObject(updateData, updatedNodes);
 
-          flow.setStatus(updateFlow.getStatus());
-          flow.setEndTime(updateFlow.getEndTime());
-          flow.setUpdateTime(updateFlow.getUpdateTime());
+          RemoteFlowWatcher.this.flow.setStatus(updateFlow.getStatus());
+          RemoteFlowWatcher.this.flow.setEndTime(updateFlow.getEndTime());
+          RemoteFlowWatcher.this.flow.setUpdateTime(updateFlow.getUpdateTime());
 
-          for (ExecutableNode node : updatedNodes) {
+          for (final ExecutableNode node : updatedNodes) {
             handleJobStatusChange(node.getNestedId(), node.getStatus());
           }
 
-          updateTime = flow.getUpdateTime();
+          updateTime = RemoteFlowWatcher.this.flow.getUpdateTime();
         }
 
-        if (Status.isStatusFinished(flow.getStatus())) {
-          isShutdown = true;
+        if (Status.isStatusFinished(RemoteFlowWatcher.this.flow.getStatus())) {
+          RemoteFlowWatcher.this.isShutdown = true;
         } else {
           synchronized (this) {
             try {
-              wait(checkIntervalMs);
-            } catch (InterruptedException e) {
+              wait(RemoteFlowWatcher.this.checkIntervalMs);
+            } catch (final InterruptedException e) {
             }
           }
         }
-      } while (!isShutdown);
+      } while (!RemoteFlowWatcher.this.isShutdown);
     }
 
   }
-
-  @Override
-  public synchronized void stopWatcher() {
-    if (isShutdown) {
-      return;
-    }
-    isShutdown = true;
-    if (thread != null) {
-      thread.interrupt();
-    }
-    super.unblockAllWatches();
-    loader = null;
-    flow = null;
-  }
 }
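
RemoteUpdaterThread above is a polling loop: re-fetch the watched flow on a fixed interval, propagate node status changes, and stop once the flow is finished or the watcher is shut down. The generic sketch below shows only that loop shape; the BooleanSupplier stands in for the executor-loader fetch and status check.

import java.util.function.BooleanSupplier;

public class PollingWatcherSketch {

  private static final long CHECK_INTERVAL_MS = 60 * 1000;
  private volatile boolean shutdown = false;
  private Thread updater;

  void startWatching(final BooleanSupplier isFlowFinished) {
    this.updater = new Thread(() -> {
      while (!this.shutdown) {
        // stands in for fetchExecutableFlow + applyUpdateObject + status check
        if (isFlowFinished.getAsBoolean()) {
          this.shutdown = true;
          break;
        }
        try {
          Thread.sleep(CHECK_INTERVAL_MS); // the real watcher waits on a monitor instead
        } catch (final InterruptedException e) {
          return; // interrupted by stopWatching(); exit the loop
        }
      }
    }, "Remote-watcher-sketch");
    this.updater.start();
  }

  synchronized void stopWatching() {
    this.shutdown = true;
    if (this.updater != null) {
      this.updater.interrupt();
    }
  }
}
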
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/ExecMetrics.java b/azkaban-exec-server/src/main/java/azkaban/execapp/ExecMetrics.java
index d3455fe..9d6798b 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/ExecMetrics.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/ExecMetrics.java
@@ -18,7 +18,6 @@ package azkaban.execapp;
 
 import azkaban.metrics.MetricsManager;
 import azkaban.metrics.MetricsUtility;
-
 import com.codahale.metrics.MetricRegistry;
 
 /**
@@ -30,7 +29,7 @@ public enum ExecMetrics {
   private final MetricRegistry registry;
 
   ExecMetrics() {
-    registry = MetricsManager.INSTANCE.getRegistry();
+    this.registry = MetricsManager.INSTANCE.getRegistry();
     setupStaticMetrics();
   }
 
@@ -38,8 +37,10 @@ public enum ExecMetrics {
 
   }
 
-  public void addFlowRunnerManagerMetrics(FlowRunnerManager flowRunnerManager) {
-    MetricsUtility.addGauge("EXEC-NumRunningFlows", registry, flowRunnerManager::getNumRunningFlows);
-    MetricsUtility.addGauge("EXEC-NumQueuedFlows", registry, flowRunnerManager::getNumQueuedFlows);
+  public void addFlowRunnerManagerMetrics(final FlowRunnerManager flowRunnerManager) {
+    MetricsUtility
+        .addGauge("EXEC-NumRunningFlows", this.registry, flowRunnerManager::getNumRunningFlows);
+    MetricsUtility
+        .addGauge("EXEC-NumQueuedFlows", this.registry, flowRunnerManager::getNumQueuedFlows);
   }
 }
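
addFlowRunnerManagerMetrics above registers two gauges through MetricsUtility.addGauge. A hedged sketch of what that presumably reduces to with the Dropwizard metrics API already on the classpath; the metric name is taken from the code above, while the AtomicInteger source is an illustrative stand-in for FlowRunnerManager.

import com.codahale.metrics.Gauge;
import com.codahale.metrics.MetricRegistry;
import java.util.concurrent.atomic.AtomicInteger;

public class ExecGaugeSketch {

  public static void main(final String[] args) {
    final MetricRegistry registry = new MetricRegistry();
    final AtomicInteger runningFlows = new AtomicInteger(0);

    // similar in spirit to addGauge("EXEC-NumRunningFlows", registry, manager::getNumRunningFlows)
    registry.register("EXEC-NumRunningFlows", (Gauge<Integer>) runningFlows::get);

    runningFlows.incrementAndGet();
    System.out.println(registry.getGauges().get("EXEC-NumRunningFlows").getValue());
  }
}
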
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/ExecutorServlet.java b/azkaban-exec-server/src/main/java/azkaban/execapp/ExecutorServlet.java
index 2e6bdb0..58297e0 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/ExecutorServlet.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/ExecutorServlet.java
@@ -16,44 +16,39 @@
 
 package azkaban.execapp;
 
-import com.google.common.base.Preconditions;
+import static java.util.Objects.requireNonNull;
 
+import azkaban.Constants;
+import azkaban.executor.ConnectorParams;
+import azkaban.executor.ExecutableFlowBase;
+import azkaban.executor.Executor;
+import azkaban.executor.ExecutorLoader;
+import azkaban.executor.ExecutorManagerException;
+import azkaban.utils.FileIOUtils.JobMetaData;
+import azkaban.utils.FileIOUtils.LogData;
+import azkaban.utils.JSONUtils;
+import com.google.common.base.Preconditions;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import javax.servlet.ServletConfig;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.log4j.Logger;
-
 import org.codehaus.jackson.map.ObjectMapper;
 
-import azkaban.Constants;
-import azkaban.executor.ConnectorParams;
-import azkaban.executor.ExecutableFlowBase;
-import azkaban.executor.Executor;
-import azkaban.executor.ExecutorLoader;
-import azkaban.executor.ExecutorManagerException;
-import azkaban.utils.FileIOUtils.JobMetaData;
-import azkaban.utils.FileIOUtils.LogData;
-import azkaban.utils.JSONUtils;
-
-import static java.util.Objects.requireNonNull;
-
 
 public class ExecutorServlet extends HttpServlet implements ConnectorParams {
+
+  public static final String JSON_MIME_TYPE = "application/json";
   private static final long serialVersionUID = 1L;
   private static final Logger logger = Logger.getLogger(ExecutorServlet.class
       .getName());
-  public static final String JSON_MIME_TYPE = "application/json";
-
   private AzkabanExecutorServer application;
   private FlowRunnerManager flowRunnerManager;
 
@@ -62,38 +57,38 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
   }
 
   @Override
-  public void init(ServletConfig config) throws ServletException {
-    application =
+  public void init(final ServletConfig config) throws ServletException {
+    this.application =
         (AzkabanExecutorServer) config.getServletContext().getAttribute(
             Constants.AZKABAN_SERVLET_CONTEXT_KEY);
 
-    if (application == null) {
+    if (this.application == null) {
       throw new IllegalStateException(
           "No batch application is defined in the servlet context!");
     }
 
-    flowRunnerManager = application.getFlowRunnerManager();
+    this.flowRunnerManager = this.application.getFlowRunnerManager();
   }
 
-  protected void writeJSON(HttpServletResponse resp, Object obj)
+  protected void writeJSON(final HttpServletResponse resp, final Object obj)
       throws IOException {
     resp.setContentType(JSON_MIME_TYPE);
-    ObjectMapper mapper = new ObjectMapper();
-    OutputStream stream = resp.getOutputStream();
+    final ObjectMapper mapper = new ObjectMapper();
+    final OutputStream stream = resp.getOutputStream();
     mapper.writeValue(stream, obj);
   }
 
   @Override
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+  public void doGet(final HttpServletRequest req, final HttpServletResponse resp)
       throws ServletException, IOException {
-    HashMap<String, Object> respMap = new HashMap<String, Object>();
+    final HashMap<String, Object> respMap = new HashMap<>();
     // logger.info("ExecutorServer called by " + req.getRemoteAddr());
     try {
       if (!hasParam(req, ACTION_PARAM)) {
         logger.error("Parameter action not set");
         respMap.put("error", "Parameter action not set");
       } else {
-        String action = getParam(req, ACTION_PARAM);
+        final String action = getParam(req, ACTION_PARAM);
         if (action.equals(UPDATE_ACTION)) {
           // logger.info("Updated called");
           handleAjaxUpdateRequest(req, respMap);
@@ -114,8 +109,8 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
         } else if (action.equals(SHUTDOWN)) {
           shutdown(respMap);
         } else {
-          int execid = Integer.parseInt(getParam(req, EXECID_PARAM));
-          String user = getParam(req, USER_PARAM, null);
+          final int execid = Integer.parseInt(getParam(req, EXECID_PARAM));
+          final String user = getParam(req, USER_PARAM, null);
 
           logger.info("User " + user + " has called action " + action + " on "
               + execid);
@@ -147,7 +142,7 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
           }
         }
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
@@ -155,113 +150,112 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
     resp.flushBuffer();
   }
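
doGet above dispatches on a single action query parameter and always replies with a JSON map, so any HTTP client can drive it. A hedged client-side sketch with plain JDK classes; the host, port, context path and the getStatus action value are assumptions made for illustration, not taken from an actual Azkaban configuration:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ExecutorStatusClientSketch {

      public static void main(final String[] args) throws Exception {
        // Hypothetical executor host/port and action value, mirroring the
        // servlet's dispatch on the action parameter.
        final URL url = new URL("http://localhost:12321/executor?action=getStatus");
        final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");

        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line); // the JSON map written by writeJSON()
          }
        } finally {
          conn.disconnect();
        }
      }
    }
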
 
-  private void handleModifyExecutionRequest(Map<String, Object> respMap,
-      int execId, String user, HttpServletRequest req) throws ServletException {
+  private void handleModifyExecutionRequest(final Map<String, Object> respMap,
+      final int execId, final String user, final HttpServletRequest req) throws ServletException {
     if (!hasParam(req, MODIFY_EXECUTION_ACTION_TYPE)) {
       respMap.put(RESPONSE_ERROR, "Modification type not set.");
     }
-    String modificationType = getParam(req, MODIFY_EXECUTION_ACTION_TYPE);
+    final String modificationType = getParam(req, MODIFY_EXECUTION_ACTION_TYPE);
 
     try {
       if (MODIFY_RETRY_FAILURES.equals(modificationType)) {
-        flowRunnerManager.retryFailures(execId, user);
+        this.flowRunnerManager.retryFailures(execId, user);
       }
-    } catch (ExecutorManagerException e) {
+    } catch (final ExecutorManagerException e) {
       logger.error(e.getMessage(), e);
       respMap.put("error", e.getMessage());
     }
   }
 
-  private void handleFetchLogEvent(int execId, HttpServletRequest req,
-      HttpServletResponse resp, Map<String, Object> respMap)
+  private void handleFetchLogEvent(final int execId, final HttpServletRequest req,
+      final HttpServletResponse resp, final Map<String, Object> respMap)
       throws ServletException {
-    String type = getParam(req, "type");
-    int startByte = getIntParam(req, "offset");
-    int length = getIntParam(req, "length");
+    final String type = getParam(req, "type");
+    final int startByte = getIntParam(req, "offset");
+    final int length = getIntParam(req, "length");
 
     resp.setContentType("text/plain");
     resp.setCharacterEncoding("utf-8");
 
     if (type.equals("flow")) {
-      LogData result;
+      final LogData result;
       try {
-        result = flowRunnerManager.readFlowLogs(execId, startByte, length);
+        result = this.flowRunnerManager.readFlowLogs(execId, startByte, length);
         respMap.putAll(result.toObject());
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error(e.getMessage(), e);
         respMap.put(RESPONSE_ERROR, e.getMessage());
       }
     } else {
-      int attempt = getIntParam(req, "attempt", 0);
-      String jobId = getParam(req, "jobId");
+      final int attempt = getIntParam(req, "attempt", 0);
+      final String jobId = getParam(req, "jobId");
       try {
-        LogData result =
-            flowRunnerManager.readJobLogs(execId, jobId, attempt, startByte,
+        final LogData result =
+            this.flowRunnerManager.readJobLogs(execId, jobId, attempt, startByte,
                 length);
         respMap.putAll(result.toObject());
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error(e.getMessage(), e);
         respMap.put("error", e.getMessage());
       }
     }
   }
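
handleFetchLogEvent serves log content in windows described by offset and length parameters, so callers page through the file chunk by chunk. A small sketch of that paging contract against a local file; the file name and chunk size are made up for the example:

    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class LogPagingSketch {

      // Read a window of [offset, offset + length) bytes, as the servlet's
      // offset/length parameters describe. Returns fewer bytes at end of file.
      static byte[] readChunk(final RandomAccessFile file, final long offset, final int length)
          throws IOException {
        file.seek(offset);
        final byte[] buffer = new byte[length];
        final int read = file.read(buffer);
        if (read <= 0) {
          return new byte[0];
        }
        final byte[] chunk = new byte[read];
        System.arraycopy(buffer, 0, chunk, 0, read);
        return chunk;
      }

      public static void main(final String[] args) throws IOException {
        // Hypothetical log file; page through it 64 KB at a time.
        try (RandomAccessFile file = new RandomAccessFile("_flow.1.test.log", "r")) {
          long offset = 0;
          byte[] chunk;
          while ((chunk = readChunk(file, offset, 64 * 1024)).length > 0) {
            System.out.write(chunk);
            offset += chunk.length;
          }
        }
      }
    }
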
 
-  private void handleFetchAttachmentsEvent(int execId, HttpServletRequest req,
-      HttpServletResponse resp, Map<String, Object> respMap)
+  private void handleFetchAttachmentsEvent(final int execId, final HttpServletRequest req,
+      final HttpServletResponse resp, final Map<String, Object> respMap)
       throws ServletException {
 
-    String jobId = getParam(req, "jobId");
-    int attempt = getIntParam(req, "attempt", 0);
+    final String jobId = getParam(req, "jobId");
+    final int attempt = getIntParam(req, "attempt", 0);
     try {
-      List<Object> result =
-          flowRunnerManager.readJobAttachments(execId, jobId, attempt);
+      final List<Object> result =
+          this.flowRunnerManager.readJobAttachments(execId, jobId, attempt);
       respMap.put("attachments", result);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put("error", e.getMessage());
     }
   }
 
-  private void handleFetchMetaDataEvent(int execId, HttpServletRequest req,
-      HttpServletResponse resp, Map<String, Object> respMap)
+  private void handleFetchMetaDataEvent(final int execId, final HttpServletRequest req,
+      final HttpServletResponse resp, final Map<String, Object> respMap)
       throws ServletException {
-    int startByte = getIntParam(req, "offset");
-    int length = getIntParam(req, "length");
+    final int startByte = getIntParam(req, "offset");
+    final int length = getIntParam(req, "length");
 
     resp.setContentType("text/plain");
     resp.setCharacterEncoding("utf-8");
 
-    int attempt = getIntParam(req, "attempt", 0);
-    String jobId = getParam(req, "jobId");
+    final int attempt = getIntParam(req, "attempt", 0);
+    final String jobId = getParam(req, "jobId");
     try {
-      JobMetaData result =
-          flowRunnerManager.readJobMetaData(execId, jobId, attempt, startByte,
+      final JobMetaData result =
+          this.flowRunnerManager.readJobMetaData(execId, jobId, attempt, startByte,
               length);
       respMap.putAll(result.toObject());
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put("error", e.getMessage());
     }
   }
 
-  @SuppressWarnings("unchecked")
-  private void handleAjaxUpdateRequest(HttpServletRequest req,
-      Map<String, Object> respMap) throws ServletException, IOException {
-    ArrayList<Object> updateTimesList =
+  private void handleAjaxUpdateRequest(final HttpServletRequest req,
+      final Map<String, Object> respMap) throws ServletException, IOException {
+    final ArrayList<Object> updateTimesList =
         (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
             UPDATE_TIME_LIST_PARAM));
-    ArrayList<Object> execIDList =
+    final ArrayList<Object> execIDList =
         (ArrayList<Object>) JSONUtils.parseJSONFromString(getParam(req,
             EXEC_ID_LIST_PARAM));
 
-    ArrayList<Object> updateList = new ArrayList<Object>();
+    final ArrayList<Object> updateList = new ArrayList<>();
     for (int i = 0; i < execIDList.size(); ++i) {
-      long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
-      int execId = (Integer) execIDList.get(i);
+      final long updateTime = JSONUtils.getLongFromObject(updateTimesList.get(i));
+      final int execId = (Integer) execIDList.get(i);
 
-      ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execId);
+      final ExecutableFlowBase flow = this.flowRunnerManager.getExecutableFlow(execId);
       if (flow == null) {
-        Map<String, Object> errorResponse = new HashMap<String, Object>();
+        final Map<String, Object> errorResponse = new HashMap<>();
         errorResponse.put(RESPONSE_ERROR, "Flow does not exist");
         errorResponse.put(UPDATE_MAP_EXEC_ID, execId);
         updateList.add(errorResponse);
@@ -276,19 +270,19 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
     respMap.put(RESPONSE_UPDATED_FLOWS, updateList);
   }
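
handleAjaxUpdateRequest expects two parallel JSON arrays, one of execution ids and one of the last update time seen for each, and answers with a per-execution update or error entry. A sketch of how a caller might build those two JSON strings with the same Jackson 1.x mapper; the literal ids and times are invented and only the parallel-list pairing is being illustrated:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import org.codehaus.jackson.map.ObjectMapper;

    public class UpdatePayloadSketch {

      public static void main(final String[] args) throws IOException {
        // Two parallel lists: an execution id and the last update time we saw
        // for it. The servlet parses both back with JSONUtils.parseJSONFromString.
        final List<Integer> execIds = Arrays.asList(101, 102, 103);
        final List<Long> updateTimes = Arrays.asList(0L, 1496300000000L, 1496300050000L);

        final ObjectMapper mapper = new ObjectMapper();
        final String execIdsJson = mapper.writeValueAsString(execIds);
        final String updateTimesJson = mapper.writeValueAsString(updateTimes);

        // These strings would be sent as the exec-id-list / update-time-list
        // request parameters (the exact parameter names come from the
        // EXEC_ID_LIST_PARAM / UPDATE_TIME_LIST_PARAM constants, not shown here).
        System.out.println(execIdsJson);
        System.out.println(updateTimesJson);
      }
    }
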
 
-  private void handleAjaxExecute(HttpServletRequest req,
-      Map<String, Object> respMap, int execId) throws ServletException {
+  private void handleAjaxExecute(final HttpServletRequest req,
+      final Map<String, Object> respMap, final int execId) throws ServletException {
     try {
-      flowRunnerManager.submitFlow(execId);
-    } catch (ExecutorManagerException e) {
+      this.flowRunnerManager.submitFlow(execId);
+    } catch (final ExecutorManagerException e) {
       e.printStackTrace();
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void handleAjaxFlowStatus(Map<String, Object> respMap, int execid) {
-    ExecutableFlowBase flow = flowRunnerManager.getExecutableFlow(execid);
+  private void handleAjaxFlowStatus(final Map<String, Object> respMap, final int execid) {
+    final ExecutableFlowBase flow = this.flowRunnerManager.getExecutableFlow(execid);
     if (flow == null) {
       respMap.put(STATUS_PARAM, RESPONSE_NOTFOUND);
     } else {
@@ -297,87 +291,89 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
     }
   }
 
-  private void handleAjaxPause(Map<String, Object> respMap, int execid,
-      String user) throws ServletException {
+  private void handleAjaxPause(final Map<String, Object> respMap, final int execid,
+      final String user) throws ServletException {
     if (user == null) {
       respMap.put(RESPONSE_ERROR, "user has not been set");
       return;
     }
 
     try {
-      flowRunnerManager.pauseFlow(execid, user);
+      this.flowRunnerManager.pauseFlow(execid, user);
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (ExecutorManagerException e) {
+    } catch (final ExecutorManagerException e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void handleAjaxResume(Map<String, Object> respMap, int execid,
-      String user) throws ServletException {
+  private void handleAjaxResume(final Map<String, Object> respMap, final int execid,
+      final String user) throws ServletException {
     if (user == null) {
       respMap.put(RESPONSE_ERROR, "user has not been set");
       return;
     }
 
     try {
-      flowRunnerManager.resumeFlow(execid, user);
+      this.flowRunnerManager.resumeFlow(execid, user);
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (ExecutorManagerException e) {
+    } catch (final ExecutorManagerException e) {
       e.printStackTrace();
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void handleAjaxCancel(Map<String, Object> respMap, int execid,
-      String user) throws ServletException {
+  private void handleAjaxCancel(final Map<String, Object> respMap, final int execid,
+      final String user) throws ServletException {
     if (user == null) {
       respMap.put(RESPONSE_ERROR, "user has not been set");
       return;
     }
 
     try {
-      flowRunnerManager.cancelFlow(execid, user);
+      this.flowRunnerManager.cancelFlow(execid, user);
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (ExecutorManagerException e) {
+    } catch (final ExecutorManagerException e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void handleReloadJobTypePlugins(Map<String, Object> respMap)
+  private void handleReloadJobTypePlugins(final Map<String, Object> respMap)
       throws ServletException {
     try {
-      flowRunnerManager.reloadJobTypePlugins();
+      this.flowRunnerManager.reloadJobTypePlugins();
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void setActive(boolean value, Map<String, Object> respMap)
+  private void setActive(final boolean value, final Map<String, Object> respMap)
       throws ServletException {
     try {
       setActiveInternal(value);
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void setActiveInternal(boolean value)
+  private void setActiveInternal(final boolean value)
       throws ExecutorManagerException {
-    ExecutorLoader executorLoader = application.getExecutorLoader();
-    Executor executor = executorLoader.fetchExecutor(application.getHost(), application.getPort());
+    final ExecutorLoader executorLoader = this.application.getExecutorLoader();
+    final Executor executor = executorLoader.fetchExecutor(this.application.getHost(),
+        this.application.getPort());
     Preconditions.checkState(executor != null, "Unable to obtain self entry in DB");
     if (executor.isActive() != value) {
       executor.setActive(value);
       executorLoader.updateExecutor(executor);
-      flowRunnerManager.setExecutorActive(value);
+      this.flowRunnerManager.setExecutorActive(value);
     } else {
-      logger.warn("Set active action ignored. Executor is already " + (value? "active" : "inactive"));
+      logger.warn(
+          "Set active action ignored. Executor is already " + (value ? "active" : "inactive"));
     }
   }
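
setActiveInternal leans on Guava's Preconditions.checkState to fail fast when the executor cannot find its own row, and only touches state when the requested value actually differs. A self-contained sketch of that pattern, with a plain object standing in for the DB entry:

    import com.google.common.base.Preconditions;

    public class PreconditionsSketch {

      private boolean active;

      // Flip the active flag only when the requested value differs, failing fast
      // when a required invariant does not hold (here: a stand-in for the
      // "self entry in DB" check above).
      void setActive(final boolean value, final Object selfEntry) {
        Preconditions.checkState(selfEntry != null, "Unable to obtain self entry in DB");
        if (this.active != value) {
          this.active = value;
        } else {
          System.out.println(
              "Set active action ignored. Already " + (value ? "active" : "inactive"));
        }
      }

      public static void main(final String[] args) {
        final PreconditionsSketch sketch = new PreconditionsSketch();
        sketch.setActive(true, new Object());   // succeeds
        sketch.setActive(true, new Object());   // ignored, already active
        try {
          sketch.setActive(false, null);        // fails the state check
        } catch (final IllegalStateException e) {
          System.out.println("checkState threw: " + e.getMessage());
        }
      }
    }
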
 
@@ -385,41 +381,41 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
    * Prepare the executor for shutdown.
    *
    * @param respMap json response object
-   * @throws ServletException
    */
-  private void shutdown(Map<String, Object> respMap)
+  private void shutdown(final Map<String, Object> respMap)
       throws ServletException {
     try {
       logger.warn("Shutting down executor...");
 
       // Set the executor to inactive. Will receive no new flows.
       setActiveInternal(false);
-      application.shutdown();
+      this.application.shutdown();
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private void getStatus(Map<String, Object> respMap)
+  private void getStatus(final Map<String, Object> respMap)
       throws ServletException {
     try {
-      ExecutorLoader executorLoader = application.getExecutorLoader();
-      final Executor executor = requireNonNull(executorLoader.fetchExecutor(application.getHost(), application.getPort()),
+      final ExecutorLoader executorLoader = this.application.getExecutorLoader();
+      final Executor executor = requireNonNull(
+          executorLoader.fetchExecutor(this.application.getHost(), this.application.getPort()),
           "The executor can not be null");
 
       respMap.put("executor_id", Integer.toString(executor.getId()));
       respMap.put("isActive", String.valueOf(executor.isActive()));
       respMap.put(STATUS_PARAM, RESPONSE_SUCCESS);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e.getMessage(), e);
       respMap.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
   @Override
-  public void doPost(HttpServletRequest req, HttpServletResponse resp)
+  public void doPost(final HttpServletRequest req, final HttpServletResponse resp)
       throws ServletException, IOException {
 
   }
@@ -427,22 +423,23 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
   /**
    * Duplicated code with AbstractAzkabanServlet, but ne
    */
-  public boolean hasParam(HttpServletRequest request, String param) {
+  public boolean hasParam(final HttpServletRequest request, final String param) {
     return request.getParameter(param) != null;
   }
 
-  public String getParam(HttpServletRequest request, String name)
+  public String getParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = request.getParameter(name);
-    if (p == null)
+    final String p = request.getParameter(name);
+    if (p == null) {
       throw new ServletException("Missing required parameter '" + name + "'.");
-    else
+    } else {
       return p;
+    }
   }
 
-  public String getParam(HttpServletRequest request, String name,
-      String defaultVal) {
-    String p = request.getParameter(name);
+  public String getParam(final HttpServletRequest request, final String name,
+      final String defaultVal) {
+    final String p = request.getParameter(name);
     if (p == null) {
       return defaultVal;
     }
@@ -450,17 +447,18 @@ public class ExecutorServlet extends HttpServlet implements ConnectorParams {
     return p;
   }
 
-  public int getIntParam(HttpServletRequest request, String name)
+  public int getIntParam(final HttpServletRequest request, final String name)
       throws ServletException {
-    String p = getParam(request, name);
+    final String p = getParam(request, name);
     return Integer.parseInt(p);
   }
 
-  public int getIntParam(HttpServletRequest request, String name, int defaultVal) {
+  public int getIntParam(final HttpServletRequest request, final String name,
+      final int defaultVal) {
     if (hasParam(request, name)) {
       try {
         return getIntParam(request, name);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         return defaultVal;
       }
     }
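
The parameter helpers above duplicate the AbstractAzkabanServlet accessors: a missing parameter either raises a ServletException or falls back to a default, and getIntParam swallows parse failures when a default is available. A container-free sketch of the same fallback behaviour over a plain map; the map stands in for HttpServletRequest and is not part of Azkaban:

    import java.util.HashMap;
    import java.util.Map;

    public class ParamParsingSketch {

      private final Map<String, String> params = new HashMap<>();

      String getParam(final String name, final String defaultVal) {
        final String p = this.params.get(name);
        return p == null ? defaultVal : p;
      }

      int getIntParam(final String name, final int defaultVal) {
        // Mirror the servlet helpers: fall back to the default on a missing or
        // unparseable value instead of propagating the exception.
        try {
          final String p = this.params.get(name);
          return p == null ? defaultVal : Integer.parseInt(p);
        } catch (final NumberFormatException e) {
          return defaultVal;
        }
      }

      public static void main(final String[] args) {
        final ParamParsingSketch request = new ParamParsingSketch();
        request.params.put("offset", "1024");
        request.params.put("attempt", "not-a-number");

        System.out.println(request.getIntParam("offset", 0));       // 1024
        System.out.println(request.getIntParam("attempt", 0));      // 0 (fallback)
        System.out.println(request.getParam("user", "anonymous"));  // default
      }
    }
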
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowPreparer.java b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowPreparer.java
index c8a4039..243a93c 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowPreparer.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowPreparer.java
@@ -17,6 +17,9 @@
 
 package azkaban.execapp;
 
+import static com.google.common.base.Preconditions.checkState;
+import static java.util.Objects.requireNonNull;
+
 import azkaban.executor.ExecutableFlow;
 import azkaban.project.ProjectFileHandler;
 import azkaban.project.ProjectManagerException;
@@ -34,11 +37,9 @@ import java.util.zip.ZipFile;
 import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
 
-import static com.google.common.base.Preconditions.*;
-import static java.util.Objects.*;
-
 
 public class FlowPreparer {
+
   private static final Logger log = Logger.getLogger(FlowPreparer.class);
 
   // TODO spyne: move to config class
@@ -49,8 +50,9 @@ public class FlowPreparer {
   private final Map<Pair<Integer, Integer>, ProjectVersion> installedProjects;
   private final StorageManager storageManager;
 
-  public FlowPreparer(StorageManager storageManager, File executionsDir, File projectsDir,
-      Map<Pair<Integer, Integer>, ProjectVersion> installedProjects) {
+  public FlowPreparer(final StorageManager storageManager, final File executionsDir,
+      final File projectsDir,
+      final Map<Pair<Integer, Integer>, ProjectVersion> installedProjects) {
     this.storageManager = storageManager;
     this.executionsDir = executionsDir;
     this.projectsDir = projectsDir;
@@ -62,7 +64,7 @@ public class FlowPreparer {
    *
    * @param flow Executable Flow instance.
    */
-  void setup(ExecutableFlow flow) {
+  void setup(final ExecutableFlow flow) {
     File execDir = null;
     try {
       // First get the ProjectVersion
@@ -79,8 +81,8 @@ public class FlowPreparer {
 
       log.info(String.format("Flow Preparation complete. [execid: %d, path: %s]",
           flow.getExecutionId(), execDir.getPath()));
-    } catch (Exception e) {
-      log.error("Error in setting up project directory: " + projectsDir + ", Exception: " + e);
+    } catch (final Exception e) {
+      log.error("Error in setting up project directory: " + this.projectsDir + ", Exception: " + e);
       cleanup(execDir);
       throw new RuntimeException(e);
     }
@@ -90,8 +92,6 @@ public class FlowPreparer {
    * Prepare the project directory.
    *
    * @param pv ProjectVersion object
-   * @throws ProjectManagerException
-   * @throws IOException
    */
   @VisibleForTesting
   void setupProject(final ProjectVersion pv)
@@ -101,7 +101,7 @@ public class FlowPreparer {
 
     final String projectDir = String.valueOf(projectId) + "." + String.valueOf(version);
     if (pv.getInstalledDir() == null) {
-      pv.setInstalledDir(new File(projectsDir, projectDir));
+      pv.setInstalledDir(new File(this.projectsDir, projectDir));
     }
 
     // If directory exists. Assume its prepared and skip.
@@ -112,14 +112,15 @@ public class FlowPreparer {
 
     log.info("Preparing Project: " + pv);
 
-    File tempDir = new File(projectsDir, "_temp." + projectDir + "." + System.currentTimeMillis());
+    final File tempDir = new File(this.projectsDir,
+        "_temp." + projectDir + "." + System.currentTimeMillis());
 
     // TODO spyne: Why mkdirs? This path should be already set up.
     tempDir.mkdirs();
 
     ProjectFileHandler projectFileHandler = null;
     try {
-      projectFileHandler = requireNonNull(storageManager.getProjectFile(projectId, version));
+      projectFileHandler = requireNonNull(this.storageManager.getProjectFile(projectId, version));
       checkState("zip".equals(projectFileHandler.getFileType()));
 
       log.info("Downloading zip file.");
@@ -141,13 +142,14 @@ public class FlowPreparer {
     }
   }
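
setupProject stages the downloaded archive in a _temp.<project>.<version>.<timestamp> directory, which is presumably renamed into the installed directory once the download and unzip succeed; that rename falls outside the hunks shown here. A minimal sketch of the stage-then-publish pattern with JDK file APIs only, under the assumption that the temp and final directories live on the same filesystem:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    public class StageThenRenameSketch {

      public static void main(final String[] args) throws IOException {
        final Path projectsDir = Files.createTempDirectory("projects");
        final Path finalDir = projectsDir.resolve("12.3");

        // Stage into a temp directory first so a half-written download never
        // appears under the final name.
        final Path tempDir = projectsDir.resolve("_temp.12.3." + System.currentTimeMillis());
        Files.createDirectories(tempDir);
        Files.write(tempDir.resolve("flow.job"), "type=command".getBytes(StandardCharsets.UTF_8));

        // Publish the prepared directory in one step (atomic where the
        // filesystem supports it).
        Files.move(tempDir, finalDir, StandardCopyOption.ATOMIC_MOVE);
        System.out.println("Installed at " + finalDir + ", exists=" + Files.exists(finalDir));
      }
    }
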
 
-  private void copyCreateHardlinkDirectory(File projectDir, File execDir) throws IOException {
+  private void copyCreateHardlinkDirectory(final File projectDir, final File execDir)
+      throws IOException {
     FileIOUtils.createDeepHardlink(projectDir, execDir);
   }
 
-  private File createExecDir(ExecutableFlow flow) {
+  private File createExecDir(final ExecutableFlow flow) {
     final int execId = flow.getExecutionId();
-    File execDir = new File(executionsDir, String.valueOf(execId));
+    final File execDir = new File(this.executionsDir, String.valueOf(execId));
     flow.setExecutionPath(execDir.getPath());
 
     // TODO spyne: Why mkdirs? This path should be already set up.
@@ -155,22 +157,23 @@ public class FlowPreparer {
     return execDir;
   }
 
-  private ProjectVersion getProjectVersion(ExecutableFlow flow) {
+  private ProjectVersion getProjectVersion(final ExecutableFlow flow) {
     // We're setting up the installed projects. First time, it may take a while
     // to set up.
     final ProjectVersion projectVersion;
-    synchronized (installedProjects) {
-      projectVersion = installedProjects.computeIfAbsent(new Pair<>(flow.getProjectId(), flow.getVersion()),
-          k -> new ProjectVersion(flow.getProjectId(), flow.getVersion()));
+    synchronized (this.installedProjects) {
+      projectVersion = this.installedProjects
+          .computeIfAbsent(new Pair<>(flow.getProjectId(), flow.getVersion()),
+              k -> new ProjectVersion(flow.getProjectId(), flow.getVersion()));
     }
     return projectVersion;
   }
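
getProjectVersion caches ProjectVersion instances keyed by (projectId, version) with computeIfAbsent, holding a lock on the shared map while it does so. A stripped-down sketch of the same cache shape, with a hand-rolled key class standing in for azkaban.utils.Pair:

    import java.util.HashMap;
    import java.util.Map;

    public class VersionCacheSketch {

      // Simplified stand-in for the Pair used as the cache key.
      static final class Key {
        final int projectId;
        final int version;

        Key(final int projectId, final int version) {
          this.projectId = projectId;
          this.version = version;
        }

        @Override
        public boolean equals(final Object o) {
          if (!(o instanceof Key)) {
            return false;
          }
          final Key other = (Key) o;
          return this.projectId == other.projectId && this.version == other.version;
        }

        @Override
        public int hashCode() {
          return 31 * this.projectId + this.version;
        }
      }

      private final Map<Key, String> installed = new HashMap<>();

      // computeIfAbsent creates the entry only on the first request for a given
      // project/version pair; the synchronized block guards the shared map, as
      // getProjectVersion does above.
      String get(final int projectId, final int version) {
        synchronized (this.installed) {
          return this.installed.computeIfAbsent(new Key(projectId, version),
              k -> "project " + k.projectId + " v" + k.version);
        }
      }

      public static void main(final String[] args) {
        final VersionCacheSketch cache = new VersionCacheSketch();
        System.out.println(cache.get(12, 3)); // created
        System.out.println(cache.get(12, 3)); // served from the cache
      }
    }
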
 
-  private void cleanup(File execDir) {
+  private void cleanup(final File execDir) {
     if (execDir != null) {
       try {
         FileUtils.deleteDirectory(execDir);
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new RuntimeException(e);
       }
     }
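
cleanup deletes the half-prepared execution directory with commons-io's FileUtils.deleteDirectory and rethrows any IOException unchecked. For reference, a JDK-only alternative that walks the tree bottom-up; this is a sketch of the same effect, not what FlowPreparer actually calls:

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Comparator;
    import java.util.stream.Stream;

    public class RecursiveDeleteSketch {

      // Delete a directory tree bottom-up: children before parents.
      static void deleteDirectory(final Path dir) throws IOException {
        if (!Files.exists(dir)) {
          return;
        }
        try (Stream<Path> walk = Files.walk(dir)) {
          walk.sorted(Comparator.reverseOrder())
              .map(Path::toFile)
              .forEach(File::delete);
        }
      }

      public static void main(final String[] args) throws IOException {
        final Path execDir = Files.createTempDirectory("execdir");
        Files.createFile(execDir.resolve("_flow.log"));
        deleteDirectory(execDir);
        System.out.println("deleted=" + !Files.exists(execDir));
      }
    }
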
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunner.java b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunner.java
index 304af6b..0eae405 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunner.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunner.java
@@ -17,28 +17,6 @@
 package azkaban.execapp;
 
 import azkaban.ServiceProvider;
-import azkaban.sla.SlaOption;
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.RejectedExecutionException;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-
 import azkaban.event.Event;
 import azkaban.event.Event.Type;
 import azkaban.event.EventData;
@@ -63,52 +41,64 @@ import azkaban.jobtype.JobTypeManager;
 import azkaban.metric.MetricReportManager;
 import azkaban.project.ProjectLoader;
 import azkaban.project.ProjectManagerException;
+import azkaban.sla.SlaOption;
 import azkaban.utils.Props;
 import azkaban.utils.PropsUtils;
 import azkaban.utils.SwapQueue;
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.RejectedExecutionException;
+import org.apache.log4j.Appender;
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.PatternLayout;
 
 /**
  * Class that handles the running of a ExecutableFlow DAG
- *
  */
 public class FlowRunner extends EventHandler implements Runnable {
+
   private static final Layout DEFAULT_LAYOUT = new PatternLayout(
       "%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
   // We check update every 5 minutes, just in case things get stuck. But for the
   // most part, we'll be idling.
   private static final long CHECK_WAIT_MS = 5 * 60 * 1000;
-
-  private Logger logger;
-  private Layout loggerLayout = DEFAULT_LAYOUT;
-  private Appender flowAppender;
-  private File logFile;
-
-  private ExecutorService executorService;
-  private ExecutorLoader executorLoader;
-  private ProjectLoader projectLoader;
-
-  private int execId;
-  private File execDir;
   private final ExecutableFlow flow;
-  private Thread flowRunnerThread;
-  private int numJobThreads = 10;
-  private ExecutionOptions.FailureAction failureAction;
-
   // Sync object for queuing
   private final Object mainSyncObj = new Object();
-
-  // Properties map
-  private Props azkabanProps;
-  private Map<String, Props> sharedProps = new HashMap<String, Props>();
   private final JobTypeManager jobtypeManager;
-
-  private JobRunnerEventListener listener = new JobRunnerEventListener();
-  private Set<JobRunner> activeJobRunners = Collections
+  private final Layout loggerLayout = DEFAULT_LAYOUT;
+  private final ExecutorLoader executorLoader;
+  private final ProjectLoader projectLoader;
+  private final int execId;
+  private final File execDir;
+  private final ExecutionOptions.FailureAction failureAction;
+  // Properties map
+  private final Props azkabanProps;
+  private final Map<String, Props> sharedProps = new HashMap<>();
+  private final JobRunnerEventListener listener = new JobRunnerEventListener();
+  private final Set<JobRunner> activeJobRunners = Collections
       .newSetFromMap(new ConcurrentHashMap<JobRunner, Boolean>());
-
   // Thread safe swap queue for finishedExecutions.
-  private SwapQueue<ExecutableNode> finishedNodes;
-
+  private final SwapQueue<ExecutableNode> finishedNodes;
+  private Logger logger;
+  private Appender flowAppender;
+  private File logFile;
+  private ExecutorService executorService;
+  private Thread flowRunnerThread;
+  private int numJobThreads = 10;
   // Used for pipelining
   private Integer pipelineLevel = null;
   private Integer pipelineExecId = null;
@@ -132,15 +122,10 @@ public class FlowRunner extends EventHandler implements Runnable {
 
   /**
    * Constructor. This will create its own ExecutorService for thread pools
-   *
-   * @param flow
-   * @param executorLoader
-   * @param projectLoader
-   * @param jobtypeManager
-   * @throws ExecutorManagerException
    */
-  public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader,
-      ProjectLoader projectLoader, JobTypeManager jobtypeManager, Props azkabanProps)
+  public FlowRunner(final ExecutableFlow flow, final ExecutorLoader executorLoader,
+      final ProjectLoader projectLoader, final JobTypeManager jobtypeManager,
+      final Props azkabanProps)
       throws ExecutorManagerException {
     this(flow, executorLoader, projectLoader, jobtypeManager, null, azkabanProps);
   }
@@ -148,17 +133,11 @@ public class FlowRunner extends EventHandler implements Runnable {
   /**
    * Constructor. If executorService is null, then it will create it's own for
    * thread pools.
-   *
-   * @param flow
-   * @param executorLoader
-   * @param projectLoader
-   * @param jobtypeManager
-   * @param executorService
-   * @throws ExecutorManagerException
    */
-  public FlowRunner(ExecutableFlow flow, ExecutorLoader executorLoader,
-      ProjectLoader projectLoader, JobTypeManager jobtypeManager,
-      ExecutorService executorService, Props azkabanProps) throws ExecutorManagerException {
+  public FlowRunner(final ExecutableFlow flow, final ExecutorLoader executorLoader,
+      final ProjectLoader projectLoader, final JobTypeManager jobtypeManager,
+      final ExecutorService executorService, final Props azkabanProps)
+      throws ExecutorManagerException {
     this.execId = flow.getExecutionId();
     this.flow = flow;
     this.executorLoader = executorLoader;
@@ -166,13 +145,13 @@ public class FlowRunner extends EventHandler implements Runnable {
     this.execDir = new File(flow.getExecutionPath());
     this.jobtypeManager = jobtypeManager;
 
-    ExecutionOptions options = flow.getExecutionOptions();
+    final ExecutionOptions options = flow.getExecutionOptions();
     this.pipelineLevel = options.getPipelineLevel();
     this.pipelineExecId = options.getPipelineExecutionId();
     this.failureAction = options.getFailureAction();
     this.proxyUsers = flow.getProxyUsers();
     this.executorService = executorService;
-    this.finishedNodes = new SwapQueue<ExecutableNode>();
+    this.finishedNodes = new SwapQueue<>();
     this.azkabanProps = azkabanProps;
 
     // Create logger and execution dir in flowRunner initialization instead of flow runtime to avoid NPE
@@ -180,120 +159,120 @@ public class FlowRunner extends EventHandler implements Runnable {
     createLogger(this.flow.getFlowId());
   }
 
-  public FlowRunner setFlowWatcher(FlowWatcher watcher) {
+  public FlowRunner setFlowWatcher(final FlowWatcher watcher) {
     this.watcher = watcher;
     return this;
   }
 
-  public FlowRunner setNumJobThreads(int jobs) {
-    numJobThreads = jobs;
+  public FlowRunner setNumJobThreads(final int jobs) {
+    this.numJobThreads = jobs;
     return this;
   }
 
-  public FlowRunner setJobLogSettings(String jobLogFileSize, int jobLogNumFiles) {
+  public FlowRunner setJobLogSettings(final String jobLogFileSize, final int jobLogNumFiles) {
     this.jobLogFileSize = jobLogFileSize;
     this.jobLogNumFiles = jobLogNumFiles;
 
     return this;
   }
 
-  public FlowRunner setValidateProxyUser(boolean validateUserProxy) {
+  public FlowRunner setValidateProxyUser(final boolean validateUserProxy) {
     this.validateUserProxy = validateUserProxy;
     return this;
   }
 
   public File getExecutionDir() {
-    return execDir;
+    return this.execDir;
   }
 
   @Override
   public void run() {
     try {
       if (this.executorService == null) {
-        this.executorService = Executors.newFixedThreadPool(numJobThreads);
+        this.executorService = Executors.newFixedThreadPool(this.numJobThreads);
       }
       setupFlowExecution();
-      flow.setStartTime(System.currentTimeMillis());
+      this.flow.setStartTime(System.currentTimeMillis());
 
       updateFlowReference();
 
-      logger.info("Updating initial flow directory.");
+      this.logger.info("Updating initial flow directory.");
       updateFlow();
-      logger.info("Fetching job and shared properties.");
+      this.logger.info("Fetching job and shared properties.");
       loadAllProperties();
 
-      this.fireEventListeners(Event.create(this, Type.FLOW_STARTED, new EventData(this.getExecutableFlow())));
+      this.fireEventListeners(
+          Event.create(this, Type.FLOW_STARTED, new EventData(this.getExecutableFlow())));
       runFlow();
-    } catch (Throwable t) {
-      if (logger != null) {
-        logger
+    } catch (final Throwable t) {
+      if (this.logger != null) {
+        this.logger
             .error(
                 "An error has occurred during the running of the flow. Quiting.",
                 t);
       }
-      flow.setStatus(Status.FAILED);
+      this.flow.setStatus(Status.FAILED);
     } finally {
-      if (watcher != null) {
-        logger.info("Watcher is attached. Stopping watcher.");
-        watcher.stopWatcher();
-        logger
-            .info("Watcher cancelled status is " + watcher.isWatchCancelled());
+      if (this.watcher != null) {
+        this.logger.info("Watcher is attached. Stopping watcher.");
+        this.watcher.stopWatcher();
+        this.logger
+            .info("Watcher cancelled status is " + this.watcher.isWatchCancelled());
       }
 
-      flow.setEndTime(System.currentTimeMillis());
-      logger.info("Setting end time for flow " + execId + " to "
+      this.flow.setEndTime(System.currentTimeMillis());
+      this.logger.info("Setting end time for flow " + this.execId + " to "
           + System.currentTimeMillis());
       closeLogger();
 
       updateFlow();
-      this.fireEventListeners(Event.create(this, Type.FLOW_FINISHED, new EventData(flow)));
+      this.fireEventListeners(Event.create(this, Type.FLOW_FINISHED, new EventData(this.flow)));
     }
   }
 
-  @SuppressWarnings("unchecked")
   private void setupFlowExecution() {
-    int projectId = flow.getProjectId();
-    int version = flow.getVersion();
-    String flowId = flow.getFlowId();
+    final int projectId = this.flow.getProjectId();
+    final int version = this.flow.getVersion();
+    final String flowId = this.flow.getFlowId();
 
     // Add a bunch of common azkaban properties
-    Props commonFlowProps = PropsUtils.addCommonFlowProperties(null, flow);
+    Props commonFlowProps = PropsUtils.addCommonFlowProperties(null, this.flow);
 
-    if (flow.getJobSource() != null) {
-      String source = flow.getJobSource();
-      Props flowProps = sharedProps.get(source);
+    if (this.flow.getJobSource() != null) {
+      final String source = this.flow.getJobSource();
+      final Props flowProps = this.sharedProps.get(source);
       flowProps.setParent(commonFlowProps);
       commonFlowProps = flowProps;
     }
 
     // If there are flow overrides, we apply them now.
-    Map<String, String> flowParam =
-        flow.getExecutionOptions().getFlowParameters();
+    final Map<String, String> flowParam =
+        this.flow.getExecutionOptions().getFlowParameters();
     if (flowParam != null && !flowParam.isEmpty()) {
       commonFlowProps = new Props(commonFlowProps, flowParam);
     }
-    flow.setInputProps(commonFlowProps);
+    this.flow.setInputProps(commonFlowProps);
 
     if (this.watcher != null) {
-      this.watcher.setLogger(logger);
+      this.watcher.setLogger(this.logger);
     }
 
-    logger.info("Assigned executor : " + AzkabanExecutorServer.getApp().getExecutorHostPort());
-    logger.info("Running execid:" + execId + " flow:" + flowId + " project:"
+    this.logger.info("Assigned executor : " + AzkabanExecutorServer.getApp().getExecutorHostPort());
+    this.logger.info("Running execid:" + this.execId + " flow:" + flowId + " project:"
         + projectId + " version:" + version);
-    if (pipelineExecId != null) {
-      logger.info("Running simulateously with " + pipelineExecId
-          + ". Pipelining level " + pipelineLevel);
+    if (this.pipelineExecId != null) {
+      this.logger.info("Running simulateously with " + this.pipelineExecId
+          + ". Pipelining level " + this.pipelineLevel);
     }
 
     // The current thread is used for interrupting blocks
-    flowRunnerThread = Thread.currentThread();
-    flowRunnerThread.setName("FlowRunner-exec-" + flow.getExecutionId());
+    this.flowRunnerThread = Thread.currentThread();
+    this.flowRunnerThread.setName("FlowRunner-exec-" + this.flow.getExecutionId());
   }
 
   private void updateFlowReference() throws ExecutorManagerException {
-    logger.info("Update active reference");
-    if (!executorLoader.updateExecutableReference(execId,
+    this.logger.info("Update active reference");
+    if (!this.executorLoader.updateExecutableReference(this.execId,
         System.currentTimeMillis())) {
       throw new ExecutorManagerException(
           "The executor reference doesn't exist. May have been killed prematurely.");
@@ -304,47 +283,46 @@ public class FlowRunner extends EventHandler implements Runnable {
     updateFlow(System.currentTimeMillis());
   }
 
-  private synchronized void updateFlow(long time) {
+  private synchronized void updateFlow(final long time) {
     try {
-      flow.setUpdateTime(time);
-      executorLoader.updateExecutableFlow(flow);
-    } catch (ExecutorManagerException e) {
-      logger.error("Error updating flow.", e);
+      this.flow.setUpdateTime(time);
+      this.executorLoader.updateExecutableFlow(this.flow);
+    } catch (final ExecutorManagerException e) {
+      this.logger.error("Error updating flow.", e);
     }
   }
 
 
   /**
    * setup logger and execution dir for the flowId
-   * @param flowId
    */
-  private void createLogger(String flowId) {
+  private void createLogger(final String flowId) {
     // Create logger
-    String loggerName = execId + "." + flowId;
-    logger = Logger.getLogger(loggerName);
+    final String loggerName = this.execId + "." + flowId;
+    this.logger = Logger.getLogger(loggerName);
 
     // Create file appender
-    String logName = "_flow." + loggerName + ".log";
-    logFile = new File(execDir, logName);
-    String absolutePath = logFile.getAbsolutePath();
+    final String logName = "_flow." + loggerName + ".log";
+    this.logFile = new File(this.execDir, logName);
+    final String absolutePath = this.logFile.getAbsolutePath();
 
-    flowAppender = null;
+    this.flowAppender = null;
     try {
-      flowAppender = new FileAppender(loggerLayout, absolutePath, false);
-      logger.addAppender(flowAppender);
-    } catch (IOException e) {
-      logger.error("Could not open log file in " + execDir, e);
+      this.flowAppender = new FileAppender(this.loggerLayout, absolutePath, false);
+      this.logger.addAppender(this.flowAppender);
+    } catch (final IOException e) {
+      this.logger.error("Could not open log file in " + this.execDir, e);
     }
   }
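
createLogger and closeLogger give each execution its own log4j 1.x logger backed by a dedicated FileAppender, so the file can be uploaded and the handle released when the flow ends. A compact sketch of that lifecycle; the execution id, logger name and target directory are invented for the example:

    import java.io.File;
    import java.io.IOException;
    import org.apache.log4j.FileAppender;
    import org.apache.log4j.Logger;
    import org.apache.log4j.PatternLayout;

    public class PerExecutionLoggerSketch {

      public static void main(final String[] args) throws IOException {
        // One logger per execution, with its own file appender, mirroring
        // createLogger()/closeLogger() above.
        final int execId = 42;
        final File execDir = new File(System.getProperty("java.io.tmpdir"));
        final File logFile = new File(execDir, "_flow." + execId + ".test.log");

        final Logger logger = Logger.getLogger(execId + ".test");
        final FileAppender appender = new FileAppender(
            new PatternLayout("%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n"),
            logFile.getAbsolutePath(), false);
        logger.addAppender(appender);

        logger.info("Starting flows");
        logger.info("Finished Flow");

        // Detach and close the appender once the flow ends so the log file is
        // flushed and can be uploaded.
        logger.removeAppender(appender);
        appender.close();
        System.out.println("Wrote " + logFile.length() + " bytes to " + logFile);
      }
    }
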
 
   private void closeLogger() {
-    if (logger != null) {
-      logger.removeAppender(flowAppender);
-      flowAppender.close();
+    if (this.logger != null) {
+      this.logger.removeAppender(this.flowAppender);
+      this.flowAppender.close();
 
       try {
-        executorLoader.uploadLogFile(execId, "", 0, logFile);
-      } catch (ExecutorManagerException e) {
+        this.executorLoader.uploadLogFile(this.execId, "", 0, this.logFile);
+      } catch (final ExecutorManagerException e) {
         e.printStackTrace();
       }
     }
@@ -352,21 +330,21 @@ public class FlowRunner extends EventHandler implements Runnable {
 
   private void loadAllProperties() throws IOException {
     // First load all the properties
-    for (FlowProps fprops : flow.getFlowProps()) {
-      String source = fprops.getSource();
-      File propsPath = new File(execDir, source);
-      Props props = new Props(null, propsPath);
-      sharedProps.put(source, props);
+    for (final FlowProps fprops : this.flow.getFlowProps()) {
+      final String source = fprops.getSource();
+      final File propsPath = new File(this.execDir, source);
+      final Props props = new Props(null, propsPath);
+      this.sharedProps.put(source, props);
     }
 
     // Resolve parents
-    for (FlowProps fprops : flow.getFlowProps()) {
+    for (final FlowProps fprops : this.flow.getFlowProps()) {
       if (fprops.getInheritedSource() != null) {
-        String source = fprops.getSource();
-        String inherit = fprops.getInheritedSource();
+        final String source = fprops.getSource();
+        final String inherit = fprops.getInheritedSource();
 
-        Props props = sharedProps.get(source);
-        Props inherits = sharedProps.get(inherit);
+        final Props props = this.sharedProps.get(source);
+        final Props inherits = this.sharedProps.get(inherit);
 
         props.setParent(inherits);
       }
@@ -375,61 +353,59 @@ public class FlowRunner extends EventHandler implements Runnable {
 
   /**
    * Main method that executes the jobs.
-   *
-   * @throws Exception
    */
   private void runFlow() throws Exception {
-    logger.info("Starting flows");
+    this.logger.info("Starting flows");
     runReadyJob(this.flow);
     updateFlow();
 
-    while (!flowFinished) {
-      synchronized (mainSyncObj) {
-        if (flowPaused) {
+    while (!this.flowFinished) {
+      synchronized (this.mainSyncObj) {
+        if (this.flowPaused) {
           try {
-            mainSyncObj.wait(CHECK_WAIT_MS);
-          } catch (InterruptedException e) {
+            this.mainSyncObj.wait(CHECK_WAIT_MS);
+          } catch (final InterruptedException e) {
           }
 
           continue;
         } else {
-          if (retryFailedJobs) {
+          if (this.retryFailedJobs) {
             retryAllFailures();
           } else if (!progressGraph()) {
             try {
-              mainSyncObj.wait(CHECK_WAIT_MS);
-            } catch (InterruptedException e) {
+              this.mainSyncObj.wait(CHECK_WAIT_MS);
+            } catch (final InterruptedException e) {
             }
           }
         }
       }
     }
 
-    logger.info("Finishing up flow. Awaiting Termination");
-    executorService.shutdown();
+    this.logger.info("Finishing up flow. Awaiting Termination");
+    this.executorService.shutdown();
 
     updateFlow();
-    logger.info("Finished Flow");
+    this.logger.info("Finished Flow");
   }
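
runFlow is a guarded loop: it progresses the graph when it can, and otherwise parks on mainSyncObj with a CHECK_WAIT_MS timeout so a missed notification cannot wedge the flow. A small two-thread sketch of that wait/notify shape with a much shorter timeout; the work items are placeholders:

    public class ProgressLoopSketch {

      // Short timeout for the demo; FlowRunner waits up to CHECK_WAIT_MS (5 min)
      // just in case a notification is missed.
      private static final long CHECK_WAIT_MS = 500;

      private final Object mainSyncObj = new Object();
      private volatile boolean flowFinished = false;
      private int readyJobs = 0;

      void submitFinishedJob() {
        synchronized (this.mainSyncObj) {
          this.readyJobs++;
          this.mainSyncObj.notifyAll(); // wake the main loop immediately
        }
      }

      void finish() {
        synchronized (this.mainSyncObj) {
          this.flowFinished = true;
          this.mainSyncObj.notifyAll();
        }
      }

      void run() throws InterruptedException {
        while (!this.flowFinished) {
          synchronized (this.mainSyncObj) {
            if (this.readyJobs > 0) {
              this.readyJobs--;
              System.out.println("progressed graph");
            } else {
              this.mainSyncObj.wait(CHECK_WAIT_MS); // idle until notified or timed out
            }
          }
        }
        System.out.println("Finished Flow");
      }

      public static void main(final String[] args) throws InterruptedException {
        final ProgressLoopSketch runner = new ProgressLoopSketch();
        final Thread feeder = new Thread(() -> {
          runner.submitFinishedJob();
          runner.submitFinishedJob();
          runner.finish();
        });
        feeder.start();
        runner.run();
        feeder.join();
      }
    }
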
 
   private void retryAllFailures() throws IOException {
-    logger.info("Restarting all failed jobs");
+    this.logger.info("Restarting all failed jobs");
 
     this.retryFailedJobs = false;
     this.flowKilled = false;
     this.flowFailed = false;
     this.flow.setStatus(Status.RUNNING);
 
-    ArrayList<ExecutableNode> retryJobs = new ArrayList<ExecutableNode>();
+    final ArrayList<ExecutableNode> retryJobs = new ArrayList<>();
     resetFailedState(this.flow, retryJobs);
 
-    for (ExecutableNode node : retryJobs) {
+    for (final ExecutableNode node : retryJobs) {
       if (node.getStatus() == Status.READY
           || node.getStatus() == Status.DISABLED) {
         runReadyJob(node);
       } else if (node.getStatus() == Status.SUCCEEDED) {
-        for (String outNodeId : node.getOutNodes()) {
-          ExecutableFlowBase base = node.getParentFlow();
+        for (final String outNodeId : node.getOutNodes()) {
+          final ExecutableFlowBase base = node.getParentFlow();
           runReadyJob(base.getExecutableNode(outNodeId));
         }
       }
@@ -441,12 +417,12 @@ public class FlowRunner extends EventHandler implements Runnable {
   }
 
   private boolean progressGraph() throws IOException {
-    finishedNodes.swap();
+    this.finishedNodes.swap();
 
     // The following nodes are finished, so we'll collect a list of outnodes
     // that are candidates for running next.
-    HashSet<ExecutableNode> nodesToCheck = new HashSet<ExecutableNode>();
-    for (ExecutableNode node : finishedNodes) {
+    final HashSet<ExecutableNode> nodesToCheck = new HashSet<>();
+    for (final ExecutableNode node : this.finishedNodes) {
       Set<String> outNodeIds = node.getOutNodes();
       ExecutableFlowBase parentFlow = node.getParentFlow();
 
@@ -457,7 +433,7 @@ public class FlowRunner extends EventHandler implements Runnable {
         // fail the job and its flow now.
         if (!retryJobIfPossible(node)) {
           propagateStatus(node.getParentFlow(), Status.FAILED_FINISHING);
-          if (failureAction == FailureAction.CANCEL_ALL) {
+          if (this.failureAction == FailureAction.CANCEL_ALL) {
             this.kill();
           }
           this.flowFailed = true;
@@ -483,8 +459,8 @@ public class FlowRunner extends EventHandler implements Runnable {
       // Add all out nodes from the finished job. We'll check against this set
       // to
       // see if any are candidates for running.
-      for (String nodeId : outNodeIds) {
-        ExecutableNode outNode = parentFlow.getExecutableNode(nodeId);
+      for (final String nodeId : outNodeIds) {
+        final ExecutableNode outNode = parentFlow.getExecutableNode(nodeId);
         nodesToCheck.add(outNode);
       }
     }
@@ -493,7 +469,7 @@ public class FlowRunner extends EventHandler implements Runnable {
     // before
     // Instant kill or skip if necessary.
     boolean jobsRun = false;
-    for (ExecutableNode node : nodesToCheck) {
+    for (final ExecutableNode node : nodesToCheck) {
       if (Status.isStatusFinished(node.getStatus())
           || Status.isStatusRunning(node.getStatus())) {
         // Really shouldn't get in here.
@@ -503,7 +479,7 @@ public class FlowRunner extends EventHandler implements Runnable {
       jobsRun |= runReadyJob(node);
     }
 
-    if (jobsRun || finishedNodes.getSize() > 0) {
+    if (jobsRun || this.finishedNodes.getSize() > 0) {
       updateFlow();
       return true;
     }
@@ -511,36 +487,36 @@ public class FlowRunner extends EventHandler implements Runnable {
     return false;
   }
 
-  private boolean runReadyJob(ExecutableNode node) throws IOException {
+  private boolean runReadyJob(final ExecutableNode node) throws IOException {
     if (Status.isStatusFinished(node.getStatus())
         || Status.isStatusRunning(node.getStatus())) {
       return false;
     }
 
-    Status nextNodeStatus = getImpliedStatus(node);
+    final Status nextNodeStatus = getImpliedStatus(node);
     if (nextNodeStatus == null) {
       return false;
     }
 
     if (nextNodeStatus == Status.CANCELLED) {
-      logger.info("Cancelling '" + node.getNestedId()
+      this.logger.info("Cancelling '" + node.getNestedId()
           + "' due to prior errors.");
       node.cancelNode(System.currentTimeMillis());
       finishExecutableNode(node);
     } else if (nextNodeStatus == Status.SKIPPED) {
-      logger.info("Skipping disabled job '" + node.getId() + "'.");
+      this.logger.info("Skipping disabled job '" + node.getId() + "'.");
       node.skipNode(System.currentTimeMillis());
       finishExecutableNode(node);
     } else if (nextNodeStatus == Status.READY) {
       if (node instanceof ExecutableFlowBase) {
-        ExecutableFlowBase flow = ((ExecutableFlowBase) node);
-        logger.info("Running flow '" + flow.getNestedId() + "'.");
+        final ExecutableFlowBase flow = ((ExecutableFlowBase) node);
+        this.logger.info("Running flow '" + flow.getNestedId() + "'.");
         flow.setStatus(Status.RUNNING);
         flow.setStartTime(System.currentTimeMillis());
         prepareJobProperties(flow);
 
-        for (String startNodeId : ((ExecutableFlowBase) node).getStartNodes()) {
-          ExecutableNode startNode = flow.getExecutableNode(startNodeId);
+        for (final String startNodeId : ((ExecutableFlowBase) node).getStartNodes()) {
+          final ExecutableNode startNode = flow.getExecutableNode(startNodeId);
           runReadyJob(startNode);
         }
       } else {
@@ -550,20 +526,20 @@ public class FlowRunner extends EventHandler implements Runnable {
     return true;
   }
 
-  private boolean retryJobIfPossible(ExecutableNode node) {
+  private boolean retryJobIfPossible(final ExecutableNode node) {
     if (node instanceof ExecutableFlowBase) {
       return false;
     }
 
     if (node.getRetries() > node.getAttempt()) {
-      logger.info("Job '" + node.getId() + "' will be retried. Attempt "
+      this.logger.info("Job '" + node.getId() + "' will be retried. Attempt "
           + node.getAttempt() + " of " + node.getRetries());
       node.setDelayedExecution(node.getRetryBackoff());
       node.resetForRetry();
       return true;
     } else {
       if (node.getRetries() > 0) {
-        logger.info("Job '" + node.getId() + "' has run out of retry attempts");
+        this.logger.info("Job '" + node.getId() + "' has run out of retry attempts");
         // Setting delayed execution to 0 in case this is manually re-tried.
         node.setDelayedExecution(0);
       }
@@ -572,9 +548,9 @@ public class FlowRunner extends EventHandler implements Runnable {
     }
   }
 
-  private void propagateStatus(ExecutableFlowBase base, Status status) {
+  private void propagateStatus(final ExecutableFlowBase base, final Status status) {
     if (!Status.isStatusFinished(base.getStatus())) {
-      logger.info("Setting " + base.getNestedId() + " to " + status);
+      this.logger.info("Setting " + base.getNestedId() + " to " + status);
       base.setStatus(status);
       if (base.getParentFlow() != null) {
         propagateStatus(base.getParentFlow(), status);
@@ -582,22 +558,22 @@ public class FlowRunner extends EventHandler implements Runnable {
     }
   }
 
-  private void finishExecutableNode(ExecutableNode node) {
-    finishedNodes.add(node);
-    EventData eventData = new EventData(node.getStatus(), node.getNestedId());
+  private void finishExecutableNode(final ExecutableNode node) {
+    this.finishedNodes.add(node);
+    final EventData eventData = new EventData(node.getStatus(), node.getNestedId());
     fireEventListeners(Event.create(this, Type.JOB_FINISHED, eventData));
   }
 
-  private void finalizeFlow(ExecutableFlowBase flow) {
-    String id = flow == this.flow ? "" : flow.getNestedId();
+  private void finalizeFlow(final ExecutableFlowBase flow) {
+    final String id = flow == this.flow ? "" : flow.getNestedId();
 
     // If it's not the starting flow, we'll create set of output props
     // for the finished flow.
     boolean succeeded = true;
     Props previousOutput = null;
 
-    for (String end : flow.getEndNodes()) {
-      ExecutableNode node = flow.getExecutableNode(end);
+    for (final String end : flow.getEndNodes()) {
+      final ExecutableNode node = flow.getExecutableNode(end);
 
       if (node.getStatus() == Status.KILLED
           || node.getStatus() == Status.FAILED
@@ -620,34 +596,34 @@ public class FlowRunner extends EventHandler implements Runnable {
 
     flow.setEndTime(System.currentTimeMillis());
     flow.setUpdateTime(System.currentTimeMillis());
-    long durationSec = (flow.getEndTime() - flow.getStartTime()) / 1000;
+    final long durationSec = (flow.getEndTime() - flow.getStartTime()) / 1000;
     switch (flow.getStatus()) {
-    case FAILED_FINISHING:
-      logger.info("Setting flow '" + id + "' status to FAILED in "
-          + durationSec + " seconds");
-      flow.setStatus(Status.FAILED);
-      break;
-    case FAILED:
-    case KILLED:
-    case CANCELLED:
-    case FAILED_SUCCEEDED:
-      logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
-          + " in " + durationSec + " seconds");
-      break;
-    default:
-      flow.setStatus(Status.SUCCEEDED);
-      logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
-          + " in " + durationSec + " seconds");
+      case FAILED_FINISHING:
+        this.logger.info("Setting flow '" + id + "' status to FAILED in "
+            + durationSec + " seconds");
+        flow.setStatus(Status.FAILED);
+        break;
+      case FAILED:
+      case KILLED:
+      case CANCELLED:
+      case FAILED_SUCCEEDED:
+        this.logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
+            + " in " + durationSec + " seconds");
+        break;
+      default:
+        flow.setStatus(Status.SUCCEEDED);
+        this.logger.info("Flow '" + id + "' is set to " + flow.getStatus().toString()
+            + " in " + durationSec + " seconds");
     }
 
     // If the finalized flow is actually the top level flow, than we finish
     // the main loop.
     if (flow instanceof ExecutableFlow) {
-      flowFinished = true;
+      this.flowFinished = true;
     }
   }
 
-  private void prepareJobProperties(ExecutableNode node) throws IOException {
+  private void prepareJobProperties(final ExecutableNode node) throws IOException {
     if (node instanceof ExecutableFlow) {
       return;
     }
@@ -657,7 +633,7 @@ public class FlowRunner extends EventHandler implements Runnable {
     // the
     // least precedence
     if (!(node instanceof ExecutableFlowBase)) {
-      String sharedProps = node.getPropsSource();
+      final String sharedProps = node.getPropsSource();
       if (sharedProps != null) {
         props = this.sharedProps.get(sharedProps);
       }
@@ -665,22 +641,22 @@ public class FlowRunner extends EventHandler implements Runnable {
 
     // The following is the hiearchical ordering of dependency resolution
     // 2. Parent Flow Properties
-    ExecutableFlowBase parentFlow = node.getParentFlow();
+    final ExecutableFlowBase parentFlow = node.getParentFlow();
     if (parentFlow != null) {
-      Props flowProps = Props.clone(parentFlow.getInputProps());
+      final Props flowProps = Props.clone(parentFlow.getInputProps());
       flowProps.setEarliestAncestor(props);
       props = flowProps;
     }
 
     // 3. Output Properties. The call creates a clone, so we can overwrite it.
-    Props outputProps = collectOutputProps(node);
+    final Props outputProps = collectOutputProps(node);
     if (outputProps != null) {
       outputProps.setEarliestAncestor(props);
       props = outputProps;
     }
 
     // 4. The job source.
-    Props jobSource = loadJobProps(node);
+    final Props jobSource = loadJobProps(node);
     if (jobSource != null) {
       jobSource.setParent(props);
       props = jobSource;
@@ -690,18 +666,17 @@ public class FlowRunner extends EventHandler implements Runnable {
   }
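
prepareJobProperties resolves a job's input props by chaining parents: shared props sit at the bottom, then the parent flow's props, then output props from dependencies, then the job's own source. A sketch of the same precedence using java.util.Properties defaults chains in place of azkaban.utils.Props:

    import java.util.Properties;

    public class PropsChainSketch {

      public static void main(final String[] args) {
        // Lowest precedence first: shared props < parent flow props < job props.
        // java.util.Properties' defaults chain stands in for Props.setParent here.
        final Properties shared = new Properties();
        shared.setProperty("retries", "1");
        shared.setProperty("user.to.proxy", "azkaban");

        final Properties parentFlow = new Properties(shared);
        parentFlow.setProperty("retries", "3"); // overrides the shared value

        final Properties job = new Properties(parentFlow);
        job.setProperty("command", "echo hello");

        System.out.println(job.getProperty("command"));       // from the job itself
        System.out.println(job.getProperty("retries"));       // 3, from the parent flow
        System.out.println(job.getProperty("user.to.proxy")); // falls back to shared
      }
    }
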
 
   /**
-   * @param props
-   * This method is to put in any job properties customization before feeding
-   * to the job.
+   * @param props the job properties to customize in place before they are
+   * handed to the job.
    */
-  private void customizeJobProperties(Props props) {
-    boolean memoryCheck = flow.getExecutionOptions().getMemoryCheck();
+  private void customizeJobProperties(final Props props) {
+    final boolean memoryCheck = this.flow.getExecutionOptions().getMemoryCheck();
     props.put(ProcessJob.AZKABAN_MEMORY_CHECK, Boolean.toString(memoryCheck));
   }
 
-  private Props loadJobProps(ExecutableNode node) throws IOException {
+  private Props loadJobProps(final ExecutableNode node) throws IOException {
     Props props = null;
-    String source = node.getJobSource();
+    final String source = node.getJobSource();
     if (source == null) {
       return null;
     }
@@ -709,22 +684,22 @@ public class FlowRunner extends EventHandler implements Runnable {
     // load the override props if any
     try {
       props =
-          projectLoader.fetchProjectProperty(flow.getProjectId(),
-              flow.getVersion(), node.getId() + ".jor");
-    } catch (ProjectManagerException e) {
+          this.projectLoader.fetchProjectProperty(this.flow.getProjectId(),
+              this.flow.getVersion(), node.getId() + ".jor");
+    } catch (final ProjectManagerException e) {
       e.printStackTrace();
-      logger.error("Error loading job override property for job "
+      this.logger.error("Error loading job override property for job "
           + node.getId());
     }
 
-    File path = new File(execDir, source);
+    final File path = new File(this.execDir, source);
     if (props == null) {
       // if no override prop, load the original one on disk
       try {
         props = new Props(null, path);
-      } catch (IOException e) {
+      } catch (final IOException e) {
         e.printStackTrace();
-        logger.error("Error loading job file " + source + " for job "
+        this.logger.error("Error loading job file " + source + " for job "
             + node.getId());
       }
     }
@@ -739,30 +714,26 @@ public class FlowRunner extends EventHandler implements Runnable {
     return props;
   }
 
-  private void runExecutableNode(ExecutableNode node) throws IOException {
+  private void runExecutableNode(final ExecutableNode node) throws IOException {
     // Collect output props from the job's dependencies.
     prepareJobProperties(node);
 
     node.setStatus(Status.QUEUED);
-    JobRunner runner = createJobRunner(node);
-    logger.info("Submitting job '" + node.getNestedId() + "' to run.");
+    final JobRunner runner = createJobRunner(node);
+    this.logger.info("Submitting job '" + node.getNestedId() + "' to run.");
     try {
-      executorService.submit(runner);
-      activeJobRunners.add(runner);
-    } catch (RejectedExecutionException e) {
-      logger.error(e);
+      this.executorService.submit(runner);
+      this.activeJobRunners.add(runner);
+    } catch (final RejectedExecutionException e) {
+      this.logger.error(e);
     }
-    ;
   }
 
   /**
    * Determines what the state of the next node should be. Returns null if the
    * node should not be run.
-   *
-   * @param node
-   * @return
    */
-  public Status getImpliedStatus(ExecutableNode node) {
+  public Status getImpliedStatus(final ExecutableNode node) {
     // If it's running or finished with 'SUCCEEDED', then don't even
     // bother starting this job.
     if (Status.isStatusRunning(node.getStatus())
@@ -773,11 +744,11 @@ public class FlowRunner extends EventHandler implements Runnable {
     // Go through the node's dependencies. If all of the previous jobs'
     // statuses are finished and not FAILED or KILLED, then we can safely
     // run this job.
-    ExecutableFlowBase flow = node.getParentFlow();
+    final ExecutableFlowBase flow = node.getParentFlow();
     boolean shouldKill = false;
-    for (String dependency : node.getInNodes()) {
-      ExecutableNode dependencyNode = flow.getExecutableNode(dependency);
-      Status depStatus = dependencyNode.getStatus();
+    for (final String dependency : node.getInNodes()) {
+      final ExecutableNode dependencyNode = flow.getExecutableNode(dependency);
+      final Status depStatus = dependencyNode.getStatus();
 
       if (!Status.isStatusFinished(depStatus)) {
         return null;
@@ -798,8 +769,8 @@ public class FlowRunner extends EventHandler implements Runnable {
     // If the flow has failed and we want to finish only the currently running
     // jobs, we just kill everything else. We also kill if the flow has been
     // cancelled.
-    if (flowFailed
-        && failureAction == ExecutionOptions.FailureAction.FINISH_CURRENTLY_RUNNING) {
+    if (this.flowFailed
+        && this.failureAction == ExecutionOptions.FailureAction.FINISH_CURRENTLY_RUNNING) {
       return Status.CANCELLED;
     } else if (shouldKill || isKilled()) {
       return Status.CANCELLED;
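
Condensed, the getImpliedStatus logic in the hunks above reads: wait (return null) while any dependency is still unfinished, cancel if a dependency finished unsuccessfully, the flow was killed, or the flow failed with FINISH_CURRENTLY_RUNNING, and otherwise mark the node READY. A rough, self-contained restatement of that rule (a hypothetical helper, simplified from the real Status.isStatusFinished checks):

import java.util.Arrays;
import java.util.List;

public class ImpliedStatusSketch {

  enum Status { READY, RUNNING, SUCCEEDED, FAILED, KILLED, CANCELLED }

  /**
   * Returns null while the node must keep waiting on an unfinished dependency.
   * cancelDueToFlowState stands in for "the flow failed with FINISH_CURRENTLY_RUNNING
   * or the flow was killed" from the real method.
   */
  static Status impliedStatus(final List<Status> depStatuses, final boolean cancelDueToFlowState) {
    boolean shouldKill = false;
    for (final Status dep : depStatuses) {
      if (dep == Status.READY || dep == Status.RUNNING) {
        return null; // a dependency has not finished yet
      }
      if (dep == Status.FAILED || dep == Status.KILLED || dep == Status.CANCELLED) {
        shouldKill = true; // a dependency finished unsuccessfully
      }
    }
    return (shouldKill || cancelDueToFlowState) ? Status.CANCELLED : Status.READY;
  }

  public static void main(final String[] args) {
    System.out.println(impliedStatus(Arrays.asList(Status.SUCCEEDED, Status.SUCCEEDED), false)); // READY
    System.out.println(impliedStatus(Arrays.asList(Status.SUCCEEDED, Status.FAILED), false));    // CANCELLED
    System.out.println(impliedStatus(Arrays.asList(Status.RUNNING), false));                     // null
  }
}
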
@@ -809,10 +780,10 @@ public class FlowRunner extends EventHandler implements Runnable {
     return Status.READY;
   }
 
-  private Props collectOutputProps(ExecutableNode node) {
+  private Props collectOutputProps(final ExecutableNode node) {
     Props previousOutput = null;
     // Iterate the in nodes again and create the dependencies
-    for (String dependency : node.getInNodes()) {
+    for (final String dependency : node.getInNodes()) {
       Props output =
           node.getParentFlow().getExecutableNode(dependency).getOutputProps();
       if (output != null) {
@@ -825,23 +796,23 @@ public class FlowRunner extends EventHandler implements Runnable {
     return previousOutput;
   }
 
-  private JobRunner createJobRunner(ExecutableNode node) {
+  private JobRunner createJobRunner(final ExecutableNode node) {
     // Load job file.
-    File path = new File(execDir, node.getJobSource());
+    final File path = new File(this.execDir, node.getJobSource());
 
-    JobRunner jobRunner =
-        new JobRunner(node, path.getParentFile(), executorLoader,
-            jobtypeManager, azkabanProps);
-    if (watcher != null) {
-      jobRunner.setPipeline(watcher, pipelineLevel);
+    final JobRunner jobRunner =
+        new JobRunner(node, path.getParentFile(), this.executorLoader,
+            this.jobtypeManager, this.azkabanProps);
+    if (this.watcher != null) {
+      jobRunner.setPipeline(this.watcher, this.pipelineLevel);
     }
-    if (validateUserProxy) {
-      jobRunner.setValidatedProxyUsers(proxyUsers);
+    if (this.validateUserProxy) {
+      jobRunner.setValidatedProxyUsers(this.proxyUsers);
     }
 
     jobRunner.setDelayStart(node.getDelayedExecution());
-    jobRunner.setLogSettings(logger, jobLogFileSize, jobLogNumFiles);
-    jobRunner.addListener(listener);
+    jobRunner.setLogSettings(this.logger, this.jobLogFileSize, this.jobLogNumFiles);
+    jobRunner.addListener(this.listener);
 
     if (JobCallbackManager.isInitialized()) {
       jobRunner.addListener(JobCallbackManager.getInstance());
@@ -854,13 +825,11 @@ public class FlowRunner extends EventHandler implements Runnable {
 
   /**
    * Configure Azkaban metrics tracking for a new jobRunner instance
-   *
-   * @param jobRunner
    */
-  private void configureJobLevelMetrics(JobRunner jobRunner) {
-    logger.info("Configuring Azkaban metrics tracking for jobrunner object");
+  private void configureJobLevelMetrics(final JobRunner jobRunner) {
+    this.logger.info("Configuring Azkaban metrics tracking for jobrunner object");
     if (MetricReportManager.isAvailable()) {
-      MetricReportManager metricManager = MetricReportManager.getInstance();
+      final MetricReportManager metricManager = MetricReportManager.getInstance();
 
       // Adding NumRunningJobMetric listener
       jobRunner.addListener((NumRunningJobMetric) metricManager
@@ -875,35 +844,35 @@ public class FlowRunner extends EventHandler implements Runnable {
     jobRunner.addListener(JmxJobMBeanManager.getInstance());
   }
 
-  public void pause(String user) {
-    synchronized (mainSyncObj) {
-      if (!flowFinished) {
-        logger.info("Flow paused by " + user);
-        flowPaused = true;
-        flow.setStatus(Status.PAUSED);
+  public void pause(final String user) {
+    synchronized (this.mainSyncObj) {
+      if (!this.flowFinished) {
+        this.logger.info("Flow paused by " + user);
+        this.flowPaused = true;
+        this.flow.setStatus(Status.PAUSED);
 
         updateFlow();
       } else {
-        logger.info("Cannot pause finished flow. Called by user " + user);
+        this.logger.info("Cannot pause finished flow. Called by user " + user);
       }
     }
 
     interrupt();
   }
 
-  public void resume(String user) {
-    synchronized (mainSyncObj) {
-      if (!flowPaused) {
-        logger.info("Cannot resume flow that isn't paused");
+  public void resume(final String user) {
+    synchronized (this.mainSyncObj) {
+      if (!this.flowPaused) {
+        this.logger.info("Cannot resume flow that isn't paused");
       } else {
-        logger.info("Flow resumed by " + user);
-        flowPaused = false;
-        if (flowFailed) {
-          flow.setStatus(Status.FAILED_FINISHING);
-        } else if (flowKilled) {
-          flow.setStatus(Status.KILLED);
+        this.logger.info("Flow resumed by " + user);
+        this.flowPaused = false;
+        if (this.flowFailed) {
+          this.flow.setStatus(Status.FAILED_FINISHING);
+        } else if (this.flowKilled) {
+          this.flow.setStatus(Status.KILLED);
         } else {
-          flow.setStatus(Status.RUNNING);
+          this.flow.setStatus(Status.RUNNING);
         }
 
         updateFlow();
@@ -913,29 +882,31 @@ public class FlowRunner extends EventHandler implements Runnable {
     interrupt();
   }
 
-  public void kill(String user) {
-    logger.info("Flow killed by " + user);
+  public void kill(final String user) {
+    this.logger.info("Flow killed by " + user);
     kill();
   }
 
   public void kill() {
-    synchronized (mainSyncObj) {
-      if(flowKilled) return;
-      logger.info("Kill has been called on flow " + execId);
-      flow.setStatus(Status.KILLED);
+    synchronized (this.mainSyncObj) {
+      if (this.flowKilled) {
+        return;
+      }
+      this.logger.info("Kill has been called on flow " + this.execId);
+      this.flow.setStatus(Status.KILLED);
       // If the flow is paused, then we'll also unpause
-      flowPaused = false;
-      flowKilled = true;
-
-      if (watcher != null) {
-        logger.info("Watcher is attached. Stopping watcher.");
-        watcher.stopWatcher();
-        logger
-            .info("Watcher cancelled status is " + watcher.isWatchCancelled());
+      this.flowPaused = false;
+      this.flowKilled = true;
+
+      if (this.watcher != null) {
+        this.logger.info("Watcher is attached. Stopping watcher.");
+        this.watcher.stopWatcher();
+        this.logger
+            .info("Watcher cancelled status is " + this.watcher.isWatchCancelled());
       }
 
-      logger.info("Killing " + activeJobRunners.size() + " jobs.");
-      for (JobRunner runner : activeJobRunners) {
+      this.logger.info("Killing " + this.activeJobRunners.size() + " jobs.");
+      for (final JobRunner runner : this.activeJobRunners) {
         runner.kill();
       }
       updateFlow();
@@ -943,30 +914,30 @@ public class FlowRunner extends EventHandler implements Runnable {
     interrupt();
   }
 
-  public void retryFailures(String user) {
-    synchronized (mainSyncObj) {
-      logger.info("Retrying failures invoked by " + user);
-      retryFailedJobs = true;
+  public void retryFailures(final String user) {
+    synchronized (this.mainSyncObj) {
+      this.logger.info("Retrying failures invoked by " + user);
+      this.retryFailedJobs = true;
       interrupt();
     }
   }
 
-  private void resetFailedState(ExecutableFlowBase flow,
-      List<ExecutableNode> nodesToRetry) {
+  private void resetFailedState(final ExecutableFlowBase flow,
+      final List<ExecutableNode> nodesToRetry) {
     // bottom up
-    LinkedList<ExecutableNode> queue = new LinkedList<ExecutableNode>();
-    for (String id : flow.getEndNodes()) {
-      ExecutableNode node = flow.getExecutableNode(id);
+    final LinkedList<ExecutableNode> queue = new LinkedList<>();
+    for (final String id : flow.getEndNodes()) {
+      final ExecutableNode node = flow.getExecutableNode(id);
       queue.add(node);
     }
 
     long maxStartTime = -1;
     while (!queue.isEmpty()) {
-      ExecutableNode node = queue.poll();
-      Status oldStatus = node.getStatus();
+      final ExecutableNode node = queue.poll();
+      final Status oldStatus = node.getStatus();
       maxStartTime = Math.max(node.getStartTime(), maxStartTime);
 
-      long currentTime = System.currentTimeMillis();
+      final long currentTime = System.currentTimeMillis();
       if (node.getStatus() == Status.SUCCEEDED) {
         // This is a candidate parent for restart
         nodesToRetry.add(node);
@@ -979,26 +950,26 @@ public class FlowRunner extends EventHandler implements Runnable {
         node.setStartTime(-1);
         node.setUpdateTime(currentTime);
       } else if (node instanceof ExecutableFlowBase) {
-        ExecutableFlowBase base = (ExecutableFlowBase) node;
+        final ExecutableFlowBase base = (ExecutableFlowBase) node;
         switch (base.getStatus()) {
-        case CANCELLED:
-          node.setStatus(Status.READY);
-          node.setEndTime(-1);
-          node.setStartTime(-1);
-          node.setUpdateTime(currentTime);
-          // Break out of the switch. We'll reset the flow just like a normal
-          // node
-          break;
-        case KILLED:
-        case FAILED:
-        case FAILED_FINISHING:
-          resetFailedState(base, nodesToRetry);
-          continue;
-        default:
-          // Continue the while loop. If the job is in a finished state that's
-          // not
-          // a failure, we don't want to reset the job.
-          continue;
+          case CANCELLED:
+            node.setStatus(Status.READY);
+            node.setEndTime(-1);
+            node.setStartTime(-1);
+            node.setUpdateTime(currentTime);
+            // Break out of the switch. We'll reset the flow just like a normal
+            // node
+            break;
+          case KILLED:
+          case FAILED:
+          case FAILED_FINISHING:
+            resetFailedState(base, nodesToRetry);
+            continue;
+          default:
+            // Continue the while loop. If the job is in a finished state that's
+            // not
+            // a failure, we don't want to reset the job.
+            continue;
         }
       } else if (node.getStatus() == Status.CANCELLED) {
         // Not a flow, but killed
@@ -1014,18 +985,18 @@ public class FlowRunner extends EventHandler implements Runnable {
 
       if (!(node instanceof ExecutableFlowBase)
           && node.getStatus() != oldStatus) {
-        logger.info("Resetting job '" + node.getNestedId() + "' from "
+        this.logger.info("Resetting job '" + node.getNestedId() + "' from "
             + oldStatus + " to " + node.getStatus());
       }
 
-      for (String inId : node.getInNodes()) {
-        ExecutableNode nodeUp = flow.getExecutableNode(inId);
+      for (final String inId : node.getInNodes()) {
+        final ExecutableNode nodeUp = flow.getExecutableNode(inId);
         queue.add(nodeUp);
       }
     }
 
     // At this point, the following code will reset the flow
-    Status oldFlowState = flow.getStatus();
+    final Status oldFlowState = flow.getStatus();
     if (maxStartTime == -1) {
       // Nothing has run inside the flow, so we assume the flow hasn't even
       // started running yet.
@@ -1035,8 +1006,8 @@ public class FlowRunner extends EventHandler implements Runnable {
 
       // Add any READY start nodes. Usually it means the flow started, but the
       // start node has not.
-      for (String id : flow.getStartNodes()) {
-        ExecutableNode node = flow.getExecutableNode(id);
+      for (final String id : flow.getStartNodes()) {
+        final ExecutableNode node = flow.getExecutableNode(id);
         if (node.getStatus() == Status.READY
             || node.getStatus() == Status.DISABLED) {
           nodesToRetry.add(node);
@@ -1045,73 +1016,32 @@ public class FlowRunner extends EventHandler implements Runnable {
     }
     flow.setUpdateTime(System.currentTimeMillis());
     flow.setEndTime(-1);
-    logger.info("Resetting flow '" + flow.getNestedId() + "' from "
+    this.logger.info("Resetting flow '" + flow.getNestedId() + "' from "
         + oldFlowState + " to " + flow.getStatus());
   }
 
   private void interrupt() {
-    flowRunnerThread.interrupt();
-  }
-
-  private class JobRunnerEventListener implements EventListener {
-    public JobRunnerEventListener() {
-    }
-
-    @Override
-    public synchronized void handleEvent(Event event) {
-
-      if (event.getType() == Type.JOB_STATUS_CHANGED) {
-        updateFlow();
-      }
-      else if (event.getType() == Type.JOB_FINISHED) {
-        JobRunner runner = (JobRunner) event.getRunner();
-        ExecutableNode node = runner.getNode();
-        EventData eventData = event.getData();
-        long seconds = (node.getEndTime() - node.getStartTime()) / 1000;
-        synchronized (mainSyncObj) {
-          logger.info("Job " + eventData.getNestedId() + " finished with status "
-              + eventData.getStatus() + " in " + seconds + " seconds");
-
-          // Cancellation is handled in the main thread, but if the flow is
-          // paused, the main thread is paused too.
-          // This unpauses the flow for cancellation.
-          if (flowPaused && eventData.getStatus() == Status.FAILED
-              && failureAction == FailureAction.CANCEL_ALL) {
-            flowPaused = false;
-          }
-
-          finishedNodes.add(node);
-          node.getParentFlow().setUpdateTime(System.currentTimeMillis());
-          interrupt();
-          fireEventListeners(event);
-        }
-      }
-      else if (event.getType() == Type.JOB_STARTED) {
-        // add job level checker
-        TriggerManager triggerManager = ServiceProvider.SERVICE_PROVIDER.getInstance(TriggerManager.class);
-        triggerManager.addTrigger(flow.getExecutionId(), SlaOption.getJobLevelSLAOptions(flow));
-      }
-    }
+    this.flowRunnerThread.interrupt();
   }
 
   public boolean isKilled() {
-    return flowKilled;
+    return this.flowKilled;
   }
 
   public ExecutableFlow getExecutableFlow() {
-    return flow;
+    return this.flow;
   }
 
   public File getFlowLogFile() {
-    return logFile;
+    return this.logFile;
   }
 
-  public File getJobLogFile(String jobId, int attempt) {
-    ExecutableNode node = flow.getExecutableNodePath(jobId);
-    File path = new File(execDir, node.getJobSource());
+  public File getJobLogFile(final String jobId, final int attempt) {
+    final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
+    final File path = new File(this.execDir, node.getJobSource());
 
-    String logFileName = JobRunner.createLogFileName(node, attempt);
-    File logFile = new File(path.getParentFile(), logFileName);
+    final String logFileName = JobRunner.createLogFileName(node, attempt);
+    final File logFile = new File(path.getParentFile(), logFileName);
 
     if (!logFile.exists()) {
       return null;
@@ -1120,25 +1050,25 @@ public class FlowRunner extends EventHandler implements Runnable {
     return logFile;
   }
 
-  public File getJobAttachmentFile(String jobId, int attempt) {
-    ExecutableNode node = flow.getExecutableNodePath(jobId);
-    File path = new File(execDir, node.getJobSource());
+  public File getJobAttachmentFile(final String jobId, final int attempt) {
+    final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
+    final File path = new File(this.execDir, node.getJobSource());
 
-    String attachmentFileName =
+    final String attachmentFileName =
         JobRunner.createAttachmentFileName(node, attempt);
-    File attachmentFile = new File(path.getParentFile(), attachmentFileName);
+    final File attachmentFile = new File(path.getParentFile(), attachmentFileName);
     if (!attachmentFile.exists()) {
       return null;
     }
     return attachmentFile;
   }
 
-  public File getJobMetaDataFile(String jobId, int attempt) {
-    ExecutableNode node = flow.getExecutableNodePath(jobId);
-    File path = new File(execDir, node.getJobSource());
+  public File getJobMetaDataFile(final String jobId, final int attempt) {
+    final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
+    final File path = new File(this.execDir, node.getJobSource());
 
-    String metaDataFileName = JobRunner.createMetaDataFileName(node, attempt);
-    File metaDataFile = new File(path.getParentFile(), metaDataFileName);
+    final String metaDataFileName = JobRunner.createMetaDataFileName(node, attempt);
+    final File metaDataFile = new File(path.getParentFile(), metaDataFileName);
 
     if (!metaDataFile.exists()) {
       return null;
@@ -1148,21 +1078,64 @@ public class FlowRunner extends EventHandler implements Runnable {
   }
 
   public boolean isRunnerThreadAlive() {
-    if (flowRunnerThread != null) {
-      return flowRunnerThread.isAlive();
+    if (this.flowRunnerThread != null) {
+      return this.flowRunnerThread.isAlive();
     }
     return false;
   }
 
   public boolean isThreadPoolShutdown() {
-    return executorService.isShutdown();
+    return this.executorService.isShutdown();
   }
 
   public int getNumRunningJobs() {
-    return activeJobRunners.size();
+    return this.activeJobRunners.size();
   }
 
   public int getExecutionId() {
-    return execId;
+    return this.execId;
+  }
+
+  private class JobRunnerEventListener implements EventListener {
+
+    public JobRunnerEventListener() {
+    }
+
+    @Override
+    public synchronized void handleEvent(final Event event) {
+
+      if (event.getType() == Type.JOB_STATUS_CHANGED) {
+        updateFlow();
+      } else if (event.getType() == Type.JOB_FINISHED) {
+        final JobRunner runner = (JobRunner) event.getRunner();
+        final ExecutableNode node = runner.getNode();
+        final EventData eventData = event.getData();
+        final long seconds = (node.getEndTime() - node.getStartTime()) / 1000;
+        synchronized (FlowRunner.this.mainSyncObj) {
+          FlowRunner.this.logger.info("Job " + eventData.getNestedId() + " finished with status "
+              + eventData.getStatus() + " in " + seconds + " seconds");
+
+          // Cancellation is handled in the main thread, but if the flow is
+          // paused, the main thread is paused too.
+          // This unpauses the flow for cancellation.
+          if (FlowRunner.this.flowPaused && eventData.getStatus() == Status.FAILED
+              && FlowRunner.this.failureAction == FailureAction.CANCEL_ALL) {
+            FlowRunner.this.flowPaused = false;
+          }
+
+          FlowRunner.this.finishedNodes.add(node);
+          node.getParentFlow().setUpdateTime(System.currentTimeMillis());
+          interrupt();
+          fireEventListeners(event);
+        }
+      } else if (event.getType() == Type.JOB_STARTED) {
+        // add job level checker
+        final TriggerManager triggerManager = ServiceProvider.SERVICE_PROVIDER
+            .getInstance(TriggerManager.class);
+        triggerManager
+            .addTrigger(FlowRunner.this.flow.getExecutionId(), SlaOption.getJobLevelSLAOptions(
+                FlowRunner.this.flow));
+      }
+    }
   }
 }
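
FlowRunner wires listeners onto each JobRunner through addListener, and the JobRunnerEventListener above reacts to JOB_STATUS_CHANGED, JOB_FINISHED and JOB_STARTED events, waking the main flow thread and registering job-level SLA triggers. A minimal sketch of an additional listener hooked up the same way (a hypothetical class; the azkaban.event package location of EventData is assumed from the usage above, not shown in this diff):

import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.event.EventListener;
import org.apache.log4j.Logger;

/**
 * Hypothetical listener that only logs job completions. It could be registered
 * next to the existing listeners in createJobRunner, e.g.
 * jobRunner.addListener(new JobCompletionLogger()).
 */
public class JobCompletionLogger implements EventListener {

  private static final Logger logger = Logger.getLogger(JobCompletionLogger.class);

  @Override
  public void handleEvent(final Event event) {
    if (event.getType() == Event.Type.JOB_FINISHED) {
      final EventData eventData = event.getData();
      logger.info("Observed job " + eventData.getNestedId()
          + " finish with status " + eventData.getStatus());
    }
  }
}
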
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunnerManager.java b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunnerManager.java
index cd074ca..bad88bb 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunnerManager.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/FlowRunnerManager.java
@@ -17,32 +17,6 @@
 package azkaban.execapp;
 
 import azkaban.Constants;
-import azkaban.executor.Status;
-import azkaban.sla.SlaOption;
-import azkaban.storage.StorageManager;
-import com.google.inject.Inject;
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.lang.Thread.State;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.Future;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.RejectedExecutionException;
-import java.util.concurrent.TimeUnit;
-
-import java.util.stream.Collectors;
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
 import azkaban.event.Event;
 import azkaban.event.EventListener;
 import azkaban.execapp.event.FlowWatcher;
@@ -53,12 +27,15 @@ import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutionOptions;
 import azkaban.executor.ExecutorLoader;
 import azkaban.executor.ExecutorManagerException;
+import azkaban.executor.Status;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.jobtype.JobTypeManagerException;
 import azkaban.metric.MetricReportManager;
 import azkaban.project.ProjectLoader;
 import azkaban.project.ProjectWhitelist;
 import azkaban.project.ProjectWhitelist.WhitelistType;
+import azkaban.sla.SlaOption;
+import azkaban.storage.StorageManager;
 import azkaban.utils.FileIOUtils;
 import azkaban.utils.FileIOUtils.JobMetaData;
 import azkaban.utils.FileIOUtils.LogData;
@@ -67,6 +44,26 @@ import azkaban.utils.Pair;
 import azkaban.utils.Props;
 import azkaban.utils.ThreadPoolExecutingListener;
 import azkaban.utils.TrackingThreadPool;
+import com.google.inject.Inject;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.lang.Thread.State;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Future;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.TimeUnit;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
 
 /**
  * Execution manager for the server side execution.
@@ -88,11 +85,10 @@ import azkaban.utils.TrackingThreadPool;
  * to find out the execution ids of the flows that are in the Status.PREPARING
  * status. The entries in this map are removed once the flow execution is
  * completed.
- *
- *
  */
 public class FlowRunnerManager implements EventListener,
     ThreadPoolExecutingListener {
+
   private static final Logger logger = Logger.getLogger(FlowRunnerManager.class);
 
   private static final String EXECUTOR_USE_BOUNDED_THREADPOOL_QUEUE = "executor.use.bounded.threadpool.queue";
@@ -152,75 +148,77 @@ public class FlowRunnerManager implements EventListener,
   private volatile boolean isExecutorActive = false;
 
   @Inject
-  public FlowRunnerManager(Props props,
-      ExecutorLoader executorLoader,
-      ProjectLoader projectLoader,
-      StorageManager storageManager,
-      TriggerManager triggerManager) throws IOException {
-    azkabanProps = props;
+  public FlowRunnerManager(final Props props,
+      final ExecutorLoader executorLoader,
+      final ProjectLoader projectLoader,
+      final StorageManager storageManager,
+      final TriggerManager triggerManager) throws IOException {
+    this.azkabanProps = props;
 
-    executionDirRetention = props.getLong("execution.dir.retention", executionDirRetention);
-    logger.info("Execution dir retention set to " + executionDirRetention + " ms");
+    this.executionDirRetention = props.getLong("execution.dir.retention",
+        this.executionDirRetention);
+    logger.info("Execution dir retention set to " + this.executionDirRetention + " ms");
 
-    executionDirectory = new File(props.getString("azkaban.execution.dir", "executions"));
-    if (!executionDirectory.exists()) {
-      executionDirectory.mkdirs();
+    this.executionDirectory = new File(props.getString("azkaban.execution.dir", "executions"));
+    if (!this.executionDirectory.exists()) {
+      this.executionDirectory.mkdirs();
     }
-    projectDirectory = new File(props.getString("azkaban.project.dir", "projects"));
-    if (!projectDirectory.exists()) {
-      projectDirectory.mkdirs();
+    this.projectDirectory = new File(props.getString("azkaban.project.dir", "projects"));
+    if (!this.projectDirectory.exists()) {
+      this.projectDirectory.mkdirs();
     }
 
-    installedProjects = loadExistingProjects();
+    this.installedProjects = loadExistingProjects();
 
     // azkaban.temp.dir
-    numThreads = props.getInt(EXECUTOR_FLOW_THREADS, DEFAULT_NUM_EXECUTING_FLOWS);
-    numJobThreadPerFlow = props.getInt(FLOW_NUM_JOB_THREADS, DEFAULT_FLOW_NUM_JOB_TREADS);
-    executorService = createExecutorService(numThreads);
+    this.numThreads = props.getInt(EXECUTOR_FLOW_THREADS, DEFAULT_NUM_EXECUTING_FLOWS);
+    this.numJobThreadPerFlow = props.getInt(FLOW_NUM_JOB_THREADS, DEFAULT_FLOW_NUM_JOB_TREADS);
+    this.executorService = createExecutorService(this.numThreads);
 
     // Create a flow preparer
-    flowPreparer = new FlowPreparer(storageManager, executionDirectory, projectDirectory, installedProjects);
+    this.flowPreparer = new FlowPreparer(storageManager, this.executionDirectory,
+        this.projectDirectory,
+        this.installedProjects);
 
     this.executorLoader = executorLoader;
     this.projectLoader = projectLoader;
     this.triggerManager = triggerManager;
 
-    this.jobLogChunkSize = azkabanProps.getString("job.log.chunk.size", "5MB");
-    this.jobLogNumFiles = azkabanProps.getInt("job.log.backup.index", 4);
-
-    this.validateProxyUser = azkabanProps.getBoolean("proxy.user.lock.down", false);
+    this.jobLogChunkSize = this.azkabanProps.getString("job.log.chunk.size", "5MB");
+    this.jobLogNumFiles = this.azkabanProps.getInt("job.log.backup.index", 4);
 
+    this.validateProxyUser = this.azkabanProps.getBoolean("proxy.user.lock.down", false);
 
-    cleanerThread = new CleanerThread();
-    cleanerThread.start();
+    this.cleanerThread = new CleanerThread();
+    this.cleanerThread.start();
 
-    String globalPropsPath = props.getString("executor.global.properties", null);
+    final String globalPropsPath = props.getString("executor.global.properties", null);
     if (globalPropsPath != null) {
-      globalProps = new Props(null, globalPropsPath);
+      this.globalProps = new Props(null, globalPropsPath);
     }
 
-    jobtypeManager =
+    this.jobtypeManager =
         new JobTypeManager(props.getString(
             AzkabanExecutorServer.JOBTYPE_PLUGIN_DIR,
-            JobTypeManager.DEFAULT_JOBTYPEPLUGINDIR), globalProps,
+            JobTypeManager.DEFAULT_JOBTYPEPLUGINDIR), this.globalProps,
             getClass().getClassLoader());
   }
 
-  private TrackingThreadPool createExecutorService(int nThreads) {
-    boolean useNewThreadPool =
-        azkabanProps.getBoolean(EXECUTOR_USE_BOUNDED_THREADPOOL_QUEUE, false);
+  private TrackingThreadPool createExecutorService(final int nThreads) {
+    final boolean useNewThreadPool =
+        this.azkabanProps.getBoolean(EXECUTOR_USE_BOUNDED_THREADPOOL_QUEUE, false);
     logger.info("useNewThreadPool: " + useNewThreadPool);
 
     if (useNewThreadPool) {
-      threadPoolQueueSize =
-          azkabanProps.getInt(EXECUTOR_THREADPOOL_WORKQUEUE_SIZE, nThreads);
-      logger.info("workQueueSize: " + threadPoolQueueSize);
+      this.threadPoolQueueSize =
+          this.azkabanProps.getInt(EXECUTOR_THREADPOOL_WORKQUEUE_SIZE, nThreads);
+      logger.info("workQueueSize: " + this.threadPoolQueueSize);
 
       // using a bounded queue for the work queue. The default rejection policy
       // {@ThreadPoolExecutor.AbortPolicy} is used
-      TrackingThreadPool executor =
+      final TrackingThreadPool executor =
           new TrackingThreadPool(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS,
-              new LinkedBlockingQueue<Runnable>(threadPoolQueueSize), this);
+              new LinkedBlockingQueue<>(this.threadPoolQueueSize), this);
 
       return executor;
     } else {
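
The bounded-queue branch above relies on ThreadPoolExecutor's default rejection policy (AbortPolicy): once all workers are busy and the work queue is full, submitting another task throws RejectedExecutionException, which is what submitFlow later reports as the running-flow limit being reached. A standalone sketch of that behaviour with plain JDK classes (pool and queue sizes are illustrative):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BoundedQueueSketch {

  public static void main(final String[] args) throws InterruptedException {
    // One worker thread and a work queue capped at one task; the default
    // rejection policy (AbortPolicy) throws once both are occupied.
    final ThreadPoolExecutor pool = new ThreadPoolExecutor(
        1, 1, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(1));

    final Runnable sleepy = () -> {
      try {
        Thread.sleep(1000);
      } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    };

    pool.execute(sleepy); // runs on the single worker
    pool.execute(sleepy); // parked in the bounded queue
    try {
      pool.execute(sleepy); // worker busy and queue full -> rejected
    } catch (final RejectedExecutionException e) {
      System.out.println("Rejected: the pool and its queue are full");
    }

    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}
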
@@ -228,32 +226,32 @@ public class FlowRunnerManager implements EventListener,
       // if the running tasks are taking a long time or stuck, this queue
       // will be very very long.
       return new TrackingThreadPool(nThreads, nThreads, 0L,
-          TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(), this);
+          TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), this);
     }
   }
 
   private Map<Pair<Integer, Integer>, ProjectVersion> loadExistingProjects() {
-    Map<Pair<Integer, Integer>, ProjectVersion> allProjects =
-        new HashMap<Pair<Integer, Integer>, ProjectVersion>();
-    for (File project : projectDirectory.listFiles(new FilenameFilter() {
+    final Map<Pair<Integer, Integer>, ProjectVersion> allProjects =
+        new HashMap<>();
+    for (final File project : this.projectDirectory.listFiles(new FilenameFilter() {
 
       String pattern = "[0-9]+\\.[0-9]+";
 
       @Override
-      public boolean accept(File dir, String name) {
-        return name.matches(pattern);
+      public boolean accept(final File dir, final String name) {
+        return name.matches(this.pattern);
       }
     })) {
       if (project.isDirectory()) {
         try {
-          String fileName = new File(project.getAbsolutePath()).getName();
-          int projectId = Integer.parseInt(fileName.split("\\.")[0]);
-          int versionNum = Integer.parseInt(fileName.split("\\.")[1]);
-          ProjectVersion version =
+          final String fileName = new File(project.getAbsolutePath()).getName();
+          final int projectId = Integer.parseInt(fileName.split("\\.")[0]);
+          final int versionNum = Integer.parseInt(fileName.split("\\.")[1]);
+          final ProjectVersion version =
               new ProjectVersion(projectId, versionNum, project);
-          allProjects.put(new Pair<Integer, Integer>(projectId, versionNum),
+          allProjects.put(new Pair<>(projectId, versionNum),
               version);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           e.printStackTrace();
         }
       }
@@ -261,211 +259,26 @@ public class FlowRunnerManager implements EventListener,
     return allProjects;
   }
 
-  public void setExecutorActive(boolean isActive) {
+  public void setExecutorActive(final boolean isActive) {
     this.isExecutorActive = isActive;
   }
 
-  public long getLastFlowSubmittedTime(){
+  public long getLastFlowSubmittedTime() {
     // Note: this is not thread safe and may result in providing dirty data.
     //       We will provide this data as-is for now and will revisit if there
     //       is a strong justification for change.
-    return lastFlowSubmittedDate;
+    return this.lastFlowSubmittedDate;
   }
 
   public Props getGlobalProps() {
-    return globalProps;
+    return this.globalProps;
   }
 
-  public void setGlobalProps(Props globalProps) {
+  public void setGlobalProps(final Props globalProps) {
     this.globalProps = globalProps;
   }
 
-  private class CleanerThread extends Thread {
-    // Every hour, clean execution dir.
-    private static final long EXECUTION_DIR_CLEAN_INTERVAL_MS = 60 * 60 * 1000;
-    // Every 5 mins clean the old project dir
-    private static final long OLD_PROJECT_DIR_INTERVAL_MS = 5 * 60 * 1000;
-    // Every 2 mins clean the recently finished list
-    private static final long RECENTLY_FINISHED_INTERVAL_MS = 2 * 60 * 1000;
-
-    // Every 5 mins kill flows running longer than allowed max running time
-    private static final long LONG_RUNNING_FLOW_KILLING_INTERVAL_MS = 5 * 60 * 1000;
-
-    private boolean shutdown = false;
-    private long lastExecutionDirCleanTime = -1;
-    private long lastOldProjectCleanTime = -1;
-    private long lastRecentlyFinishedCleanTime = -1;
-    private long lastLongRunningFlowCleanTime = -1;
-    private final long flowMaxRunningTimeInMins = azkabanProps.getInt(
-        Constants.ConfigurationKeys.AZKABAN_MAX_FLOW_RUNNING_MINS, -1);
-
-    public CleanerThread() {
-      this.setName("FlowRunnerManager-Cleaner-Thread");
-      setDaemon(true);
-    }
-
-    @SuppressWarnings("unused")
-    public void shutdown() {
-      shutdown = true;
-      this.interrupt();
-    }
-
-    private boolean isFlowRunningLongerThan(ExecutableFlow flow, long flowMaxRunningTimeInMins) {
-      Set<Status> nonFinishingStatusAfterFlowStarts = new HashSet<>(Arrays.asList(Status.RUNNING, Status.QUEUED, Status.PAUSED, Status.FAILED_FINISHING));
-      return nonFinishingStatusAfterFlowStarts.contains(flow.getStatus()) && flow.getStartTime() > 0 && TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis()-flow.getStartTime()) >= flowMaxRunningTimeInMins;
-    }
-
-    @Override
-    public void run() {
-      while (!shutdown) {
-        synchronized (this) {
-          try {
-            lastCleanerThreadCheckTime = System.currentTimeMillis();
-            logger.info("# of executing flows: " + getNumRunningFlows());
-
-            // Cleanup old stuff.
-            long currentTime = System.currentTimeMillis();
-            if (currentTime - RECENTLY_FINISHED_INTERVAL_MS > lastRecentlyFinishedCleanTime) {
-              logger.info("Cleaning recently finished");
-              cleanRecentlyFinished();
-              lastRecentlyFinishedCleanTime = currentTime;
-            }
-
-            if (currentTime - OLD_PROJECT_DIR_INTERVAL_MS > lastOldProjectCleanTime && isExecutorActive) {
-              logger.info("Cleaning old projects");
-              cleanOlderProjects();
-              lastOldProjectCleanTime = currentTime;
-            }
-
-            if (currentTime - EXECUTION_DIR_CLEAN_INTERVAL_MS > lastExecutionDirCleanTime) {
-              logger.info("Cleaning old execution dirs");
-              cleanOlderExecutionDirs();
-              lastExecutionDirCleanTime = currentTime;
-            }
-
-            if (flowMaxRunningTimeInMins > 0 && currentTime - LONG_RUNNING_FLOW_KILLING_INTERVAL_MS > lastLongRunningFlowCleanTime) {
-              logger.info(String.format("Killing long jobs running longer than %s mins", flowMaxRunningTimeInMins));
-              for (FlowRunner flowRunner : runningFlows.values()) {
-                if (isFlowRunningLongerThan(flowRunner.getExecutableFlow(), flowMaxRunningTimeInMins)) {
-                  logger.info(String.format("Killing job [id: %s, status: %s]. It has been running for %s mins", flowRunner.getExecutableFlow().getId(), flowRunner.getExecutableFlow().getStatus(), TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis()-flowRunner.getExecutableFlow().getStartTime())));
-                  flowRunner.kill();
-                }
-              }
-              lastLongRunningFlowCleanTime = currentTime;
-            }
-
-            wait(RECENTLY_FINISHED_TIME_TO_LIVE);
-          } catch (InterruptedException e) {
-            logger.info("Interrupted. Probably to shut down.");
-          } catch (Throwable t) {
-            logger.warn(
-                "Uncaught throwable, please look into why it is not caught", t);
-          }
-        }
-      }
-    }
-
-    private void cleanOlderExecutionDirs() {
-      File dir = executionDirectory;
-
-      final long pastTimeThreshold =
-          System.currentTimeMillis() - executionDirRetention;
-      File[] executionDirs = dir.listFiles(path -> path.isDirectory() && path.lastModified() < pastTimeThreshold);
-
-      for (File exDir : executionDirs) {
-        try {
-          int execId = Integer.valueOf(exDir.getName());
-          if (runningFlows.containsKey(execId)
-              || recentlyFinishedFlows.containsKey(execId)) {
-            continue;
-          }
-        } catch (NumberFormatException e) {
-          logger.error("Can't delete exec dir " + exDir.getName()
-              + " it is not a number");
-          continue;
-        }
-
-        synchronized (executionDirDeletionSync) {
-          try {
-            FileUtils.deleteDirectory(exDir);
-          } catch (IOException e) {
-            logger.error("Error cleaning execution dir " + exDir.getPath(), e);
-          }
-        }
-      }
-    }
-
-    private void cleanRecentlyFinished() {
-      long cleanupThreshold =
-          System.currentTimeMillis() - RECENTLY_FINISHED_TIME_TO_LIVE;
-      ArrayList<Integer> executionToKill = new ArrayList<Integer>();
-      for (ExecutableFlow flow : recentlyFinishedFlows.values()) {
-        if (flow.getEndTime() < cleanupThreshold) {
-          executionToKill.add(flow.getExecutionId());
-        }
-      }
-
-      for (Integer id : executionToKill) {
-        logger.info("Cleaning execution " + id
-            + " from recently finished flows list.");
-        recentlyFinishedFlows.remove(id);
-      }
-    }
-
-    private void cleanOlderProjects() {
-      Map<Integer, ArrayList<ProjectVersion>> projectVersions =
-          new HashMap<Integer, ArrayList<ProjectVersion>>();
-      for (ProjectVersion version : installedProjects.values()) {
-        ArrayList<ProjectVersion> versionList =
-            projectVersions.get(version.getProjectId());
-        if (versionList == null) {
-          versionList = new ArrayList<ProjectVersion>();
-          projectVersions.put(version.getProjectId(), versionList);
-        }
-        versionList.add(version);
-      }
-
-      HashSet<Pair<Integer, Integer>> activeProjectVersions =
-          new HashSet<Pair<Integer, Integer>>();
-      for (FlowRunner runner : runningFlows.values()) {
-        ExecutableFlow flow = runner.getExecutableFlow();
-        activeProjectVersions.add(new Pair<Integer, Integer>(flow
-            .getProjectId(), flow.getVersion()));
-      }
-
-      for (Map.Entry<Integer, ArrayList<ProjectVersion>> entry : projectVersions
-          .entrySet()) {
-        // Integer projectId = entry.getKey();
-        ArrayList<ProjectVersion> installedVersions = entry.getValue();
-
-        // Keep one version of the project around.
-        if (installedVersions.size() == 1) {
-          continue;
-        }
-
-        Collections.sort(installedVersions);
-        for (int i = 0; i < installedVersions.size() - 1; ++i) {
-          ProjectVersion version = installedVersions.get(i);
-          Pair<Integer, Integer> versionKey =
-              new Pair<Integer, Integer>(version.getProjectId(),
-                  version.getVersion());
-          if (!activeProjectVersions.contains(versionKey)) {
-            try {
-              logger.info("Removing old unused installed project "
-                  + version.getProjectId() + ":" + version.getVersion());
-              deleteDirectory(version);
-              installedProjects.remove(new Pair<Integer, Integer>(version
-                  .getProjectId(), version.getVersion()));
-            } catch (IOException e) {
-              logger.error(e);
-            }
-          }
-        }
-      }
-    }
-  }
-
-  public void deleteDirectory(ProjectVersion pv) throws IOException {
+  public void deleteDirectory(final ProjectVersion pv) throws IOException {
     synchronized (pv) {
       logger.warn("Deleting project: " + pv);
       final File installedDir = pv.getInstalledDir();
@@ -475,49 +288,49 @@ public class FlowRunnerManager implements EventListener,
     }
   }
 
-  public void submitFlow(int execId) throws ExecutorManagerException {
+  public void submitFlow(final int execId) throws ExecutorManagerException {
     // Load file and submit
-    if (runningFlows.containsKey(execId)) {
+    if (this.runningFlows.containsKey(execId)) {
       throw new ExecutorManagerException("Execution " + execId
           + " is already running.");
     }
 
     ExecutableFlow flow = null;
-    flow = executorLoader.fetchExecutableFlow(execId);
+    flow = this.executorLoader.fetchExecutableFlow(execId);
     if (flow == null) {
       throw new ExecutorManagerException("Error loading flow with exec "
           + execId);
     }
 
     // Sets up the project files and execution directory.
-    flowPreparer.setup(flow);
+    this.flowPreparer.setup(flow);
 
     // Setup flow runner
     FlowWatcher watcher = null;
-    ExecutionOptions options = flow.getExecutionOptions();
+    final ExecutionOptions options = flow.getExecutionOptions();
     if (options.getPipelineExecutionId() != null) {
-      Integer pipelineExecId = options.getPipelineExecutionId();
-      FlowRunner runner = runningFlows.get(pipelineExecId);
+      final Integer pipelineExecId = options.getPipelineExecutionId();
+      final FlowRunner runner = this.runningFlows.get(pipelineExecId);
 
       if (runner != null) {
         watcher = new LocalFlowWatcher(runner);
       } else {
-        watcher = new RemoteFlowWatcher(pipelineExecId, executorLoader);
+        watcher = new RemoteFlowWatcher(pipelineExecId, this.executorLoader);
       }
     }
 
-    int numJobThreads = numJobThreadPerFlow;
+    int numJobThreads = this.numJobThreadPerFlow;
     if (options.getFlowParameters().containsKey(FLOW_NUM_JOB_THREADS)) {
       try {
-        int numJobs =
+        final int numJobs =
             Integer.valueOf(options.getFlowParameters().get(
                 FLOW_NUM_JOB_THREADS));
         if (numJobs > 0 && (numJobs <= numJobThreads || ProjectWhitelist
-                .isProjectWhitelisted(flow.getProjectId(),
-                    WhitelistType.NumJobPerFlow))) {
+            .isProjectWhitelisted(flow.getProjectId(),
+                WhitelistType.NumJobPerFlow))) {
           numJobThreads = numJobs;
         }
-      } catch (Exception e) {
+      } catch (final Exception e) {
         throw new ExecutorManagerException(
             "Failed to set the number of job threads "
                 + options.getFlowParameters().get(FLOW_NUM_JOB_THREADS)
@@ -525,35 +338,36 @@ public class FlowRunnerManager implements EventListener,
       }
     }
 
-    FlowRunner runner =
-        new FlowRunner(flow, executorLoader, projectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(flow, this.executorLoader, this.projectLoader, this.jobtypeManager,
+            this.azkabanProps);
     runner.setFlowWatcher(watcher)
-        .setJobLogSettings(jobLogChunkSize, jobLogNumFiles)
-        .setValidateProxyUser(validateProxyUser)
+        .setJobLogSettings(this.jobLogChunkSize, this.jobLogNumFiles)
+        .setValidateProxyUser(this.validateProxyUser)
         .setNumJobThreads(numJobThreads).addListener(this);
 
     configureFlowLevelMetrics(runner);
 
     // Check again.
-    if (runningFlows.containsKey(execId)) {
+    if (this.runningFlows.containsKey(execId)) {
       throw new ExecutorManagerException("Execution " + execId
           + " is already running.");
     }
 
     // Finally, queue the sucker.
-    runningFlows.put(execId, runner);
+    this.runningFlows.put(execId, runner);
 
     try {
       // The executorService already has a queue.
       // The submit method below actually returns an instance of FutureTask,
       // which implements interface RunnableFuture, which extends both
       // Runnable and Future interfaces
-      Future<?> future = executorService.submit(runner);
+      final Future<?> future = this.executorService.submit(runner);
       // keep track of this future
-      submittedFlows.put(future, runner.getExecutionId());
+      this.submittedFlows.put(future, runner.getExecutionId());
       // update the last submitted time.
       this.lastFlowSubmittedDate = System.currentTimeMillis();
-    } catch (RejectedExecutionException re) {
+    } catch (final RejectedExecutionException re) {
       throw new ExecutorManagerException(
           "Azkaban server can't execute any more flows. "
               + "The number of running flows has reached the system configured limit."
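
The comment in the hunk above points out that executorService.submit wraps the runner in a FutureTask, which implements RunnableFuture, so the returned Future can serve as the key in submittedFlows and later be matched against the executor's in-progress Runnables. A small self-contained sketch of that bookkeeping pattern (names are illustrative):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class SubmittedTaskTrackingSketch {

  public static void main(final String[] args) throws InterruptedException {
    final ExecutorService pool = Executors.newFixedThreadPool(2);
    final Map<Future<?>, Integer> submitted = new ConcurrentHashMap<>();

    for (int execId = 1; execId <= 3; execId++) {
      final int id = execId;
      // submit() wraps the task in a FutureTask, which is both a Runnable and a
      // Future, so the returned handle can key the map and later be recognized
      // among the executor's queued or in-progress Runnables.
      final Future<?> future = pool.submit(() -> System.out.println("running flow " + id));
      submitted.put(future, id);
    }

    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
    System.out.println("tracked " + submitted.size() + " submissions");
  }
}
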
@@ -563,14 +377,12 @@ public class FlowRunnerManager implements EventListener,
 
   /**
    * Configure Azkaban metrics tracking for a new flowRunner instance
-   *
-   * @param flowRunner
    */
-  private void configureFlowLevelMetrics(FlowRunner flowRunner) {
+  private void configureFlowLevelMetrics(final FlowRunner flowRunner) {
     logger.info("Configuring Azkaban metrics tracking for flow runner object");
 
     if (MetricReportManager.isAvailable()) {
-      MetricReportManager metricManager = MetricReportManager.getInstance();
+      final MetricReportManager metricManager = MetricReportManager.getInstance();
       // Adding NumFailedFlow Metric listener
       flowRunner.addListener((NumFailedFlowMetric) metricManager
           .getMetricFromName(NumFailedFlowMetric.NUM_FAILED_FLOW_METRIC_NAME));
@@ -578,9 +390,9 @@ public class FlowRunnerManager implements EventListener,
 
   }
 
-  public void cancelFlow(int execId, String user)
+  public void cancelFlow(final int execId, final String user)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
 
     if (runner == null) {
       throw new ExecutorManagerException("Execution " + execId
@@ -590,9 +402,9 @@ public class FlowRunnerManager implements EventListener,
     runner.kill(user);
   }
 
-  public void pauseFlow(int execId, String user)
+  public void pauseFlow(final int execId, final String user)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
 
     if (runner == null) {
       throw new ExecutorManagerException("Execution " + execId
@@ -602,9 +414,9 @@ public class FlowRunnerManager implements EventListener,
     runner.pause(user);
   }
 
-  public void resumeFlow(int execId, String user)
+  public void resumeFlow(final int execId, final String user)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
 
     if (runner == null) {
       throw new ExecutorManagerException("Execution " + execId
@@ -614,9 +426,9 @@ public class FlowRunnerManager implements EventListener,
     runner.resume(user);
   }
 
-  public void retryFailures(int execId, String user)
+  public void retryFailures(final int execId, final String user)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
 
     if (runner == null) {
       throw new ExecutorManagerException("Execution " + execId
@@ -626,58 +438,58 @@ public class FlowRunnerManager implements EventListener,
     runner.retryFailures(user);
   }
 
-  public ExecutableFlow getExecutableFlow(int execId) {
-    FlowRunner runner = runningFlows.get(execId);
+  public ExecutableFlow getExecutableFlow(final int execId) {
+    final FlowRunner runner = this.runningFlows.get(execId);
     if (runner == null) {
-      return recentlyFinishedFlows.get(execId);
+      return this.recentlyFinishedFlows.get(execId);
     }
     return runner.getExecutableFlow();
   }
 
   @Override
-  public void handleEvent(Event event) {
+  public void handleEvent(final Event event) {
     if (event.getType() == Event.Type.FLOW_FINISHED || event.getType() == Event.Type.FLOW_STARTED) {
-      FlowRunner flowRunner = (FlowRunner) event.getRunner();
-      ExecutableFlow flow = flowRunner.getExecutableFlow();
+      final FlowRunner flowRunner = (FlowRunner) event.getRunner();
+      final ExecutableFlow flow = flowRunner.getExecutableFlow();
 
       if (event.getType() == Event.Type.FLOW_FINISHED) {
-        recentlyFinishedFlows.put(flow.getExecutionId(), flow);
+        this.recentlyFinishedFlows.put(flow.getExecutionId(), flow);
         logger.info("Flow " + flow.getExecutionId()
             + " is finished. Adding it to recently finished flows list.");
-        runningFlows.remove(flow.getExecutionId());
-      }
-      else if (event.getType() == Event.Type.FLOW_STARTED) {
+        this.runningFlows.remove(flow.getExecutionId());
+      } else if (event.getType() == Event.Type.FLOW_STARTED) {
         // add flow level SLA checker
-        triggerManager.addTrigger(flow.getExecutionId(), SlaOption.getFlowLevelSLAOptions(flow));
+        this.triggerManager
+            .addTrigger(flow.getExecutionId(), SlaOption.getFlowLevelSLAOptions(flow));
       }
     }
   }
 
-  public LogData readFlowLogs(int execId, int startByte, int length)
+  public LogData readFlowLogs(final int execId, final int startByte, final int length)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
     if (runner == null) {
       throw new ExecutorManagerException("Running flow " + execId
           + " not found.");
     }
 
-    File dir = runner.getExecutionDir();
+    final File dir = runner.getExecutionDir();
     if (dir != null && dir.exists()) {
       try {
-        synchronized (executionDirDeletionSync) {
+        synchronized (this.executionDirDeletionSync) {
           if (!dir.exists()) {
             throw new ExecutorManagerException(
                 "Execution dir file doesn't exist. Probably has beend deleted");
           }
 
-          File logFile = runner.getFlowLogFile();
+          final File logFile = runner.getFlowLogFile();
           if (logFile != null && logFile.exists()) {
             return FileIOUtils.readUtf8File(logFile, startByte, length);
           } else {
             throw new ExecutorManagerException("Flow log file doesn't exist.");
           }
         }
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new ExecutorManagerException(e);
       }
     }
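
readFlowLogs, like the other read methods below, synchronizes on executionDirDeletionSync and re-checks dir.exists() inside the lock, so the cleaner cannot delete an execution directory halfway through a log read. A minimal sketch of that shared-lock, check-then-read pattern (paths and the exception type are illustrative):

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;

public class GuardedDirAccessSketch {

  private final Object dirDeletionSync = new Object();
  private final File executionDir = new File("executions/123");

  /** Reader: re-checks existence inside the lock so a concurrent delete cannot race it. */
  public String readLog(final File logFile) throws IOException {
    synchronized (this.dirDeletionSync) {
      if (!this.executionDir.exists()) {
        throw new IOException("Execution dir doesn't exist. Probably has been deleted");
      }
      return FileUtils.readFileToString(logFile, "UTF-8");
    }
  }

  /** Deleter: holds the same lock while removing the directory. */
  public void deleteExecutionDir() throws IOException {
    synchronized (this.dirDeletionSync) {
      FileUtils.deleteDirectory(this.executionDir);
    }
  }
}
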
@@ -686,30 +498,30 @@ public class FlowRunnerManager implements EventListener,
         "Error reading file. Log directory doesn't exist.");
   }
 
-  public LogData readJobLogs(int execId, String jobId, int attempt,
-      int startByte, int length) throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+  public LogData readJobLogs(final int execId, final String jobId, final int attempt,
+      final int startByte, final int length) throws ExecutorManagerException {
+    final FlowRunner runner = this.runningFlows.get(execId);
     if (runner == null) {
       throw new ExecutorManagerException("Running flow " + execId
           + " not found.");
     }
 
-    File dir = runner.getExecutionDir();
+    final File dir = runner.getExecutionDir();
     if (dir != null && dir.exists()) {
       try {
-        synchronized (executionDirDeletionSync) {
+        synchronized (this.executionDirDeletionSync) {
           if (!dir.exists()) {
             throw new ExecutorManagerException(
                 "Execution dir file doesn't exist. Probably has beend deleted");
           }
-          File logFile = runner.getJobLogFile(jobId, attempt);
+          final File logFile = runner.getJobLogFile(jobId, attempt);
           if (logFile != null && logFile.exists()) {
             return FileIOUtils.readUtf8File(logFile, startByte, length);
           } else {
             throw new ExecutorManagerException("Job log file doesn't exist.");
           }
         }
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new ExecutorManagerException(e);
       }
     }
@@ -718,60 +530,59 @@ public class FlowRunnerManager implements EventListener,
         "Error reading file. Log directory doesn't exist.");
   }
 
-  public List<Object> readJobAttachments(int execId, String jobId, int attempt)
+  public List<Object> readJobAttachments(final int execId, final String jobId, final int attempt)
       throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+    final FlowRunner runner = this.runningFlows.get(execId);
     if (runner == null) {
       throw new ExecutorManagerException("Running flow " + execId
           + " not found.");
     }
 
-    File dir = runner.getExecutionDir();
+    final File dir = runner.getExecutionDir();
     if (dir == null || !dir.exists()) {
       throw new ExecutorManagerException(
           "Error reading file. Log directory doesn't exist.");
     }
 
     try {
-      synchronized (executionDirDeletionSync) {
+      synchronized (this.executionDirDeletionSync) {
         if (!dir.exists()) {
           throw new ExecutorManagerException(
               "Execution dir file doesn't exist. Probably has beend deleted");
         }
 
-        File attachmentFile = runner.getJobAttachmentFile(jobId, attempt);
+        final File attachmentFile = runner.getJobAttachmentFile(jobId, attempt);
         if (attachmentFile == null || !attachmentFile.exists()) {
           return null;
         }
 
-        @SuppressWarnings("unchecked")
-        List<Object> jobAttachments =
+        final List<Object> jobAttachments =
             (ArrayList<Object>) JSONUtils.parseJSONFromFile(attachmentFile);
 
         return jobAttachments;
       }
-    } catch (IOException e) {
+    } catch (final IOException e) {
       throw new ExecutorManagerException(e);
     }
   }
 
-  public JobMetaData readJobMetaData(int execId, String jobId, int attempt,
-      int startByte, int length) throws ExecutorManagerException {
-    FlowRunner runner = runningFlows.get(execId);
+  public JobMetaData readJobMetaData(final int execId, final String jobId, final int attempt,
+      final int startByte, final int length) throws ExecutorManagerException {
+    final FlowRunner runner = this.runningFlows.get(execId);
     if (runner == null) {
       throw new ExecutorManagerException("Running flow " + execId
           + " not found.");
     }
 
-    File dir = runner.getExecutionDir();
+    final File dir = runner.getExecutionDir();
     if (dir != null && dir.exists()) {
       try {
-        synchronized (executionDirDeletionSync) {
+        synchronized (this.executionDirDeletionSync) {
           if (!dir.exists()) {
             throw new ExecutorManagerException(
                 "Execution dir file doesn't exist. Probably has beend deleted");
           }
-          File metaDataFile = runner.getJobMetaDataFile(jobId, attempt);
+          final File metaDataFile = runner.getJobMetaDataFile(jobId, attempt);
           if (metaDataFile != null && metaDataFile.exists()) {
             return FileIOUtils.readUtf8MetaDataFile(metaDataFile, startByte,
                 length);
@@ -779,7 +590,7 @@ public class FlowRunnerManager implements EventListener,
             throw new ExecutorManagerException("Job log file doesn't exist.");
           }
         }
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new ExecutorManagerException(e);
       }
     }
@@ -789,7 +600,7 @@ public class FlowRunnerManager implements EventListener,
   }
 
   public long getLastCleanerThreadCheckTime() {
-    return lastCleanerThreadCheckTime;
+    return this.lastCleanerThreadCheckTime;
   }
 
   public boolean isCleanerThreadActive() {
@@ -801,28 +612,28 @@ public class FlowRunnerManager implements EventListener,
   }
 
   public boolean isExecutorThreadPoolShutdown() {
-    return executorService.isShutdown();
+    return this.executorService.isShutdown();
   }
 
   public int getNumQueuedFlows() {
-    return executorService.getQueue().size();
+    return this.executorService.getQueue().size();
   }
 
   public int getNumRunningFlows() {
-    return executorService.getActiveCount();
+    return this.executorService.getActiveCount();
   }
 
   public String getRunningFlowIds() {
     // The in progress tasks are actually of type FutureTask
-    Set<Runnable> inProgressTasks = executorService.getInProgressTasks();
+    final Set<Runnable> inProgressTasks = this.executorService.getInProgressTasks();
 
-    List<Integer> runningFlowIds =
-        new ArrayList<Integer>(inProgressTasks.size());
+    final List<Integer> runningFlowIds =
+        new ArrayList<>(inProgressTasks.size());
 
-    for (Runnable task : inProgressTasks) {
+    for (final Runnable task : inProgressTasks) {
       // add casting here to ensure it matches the expected type in
       // submittedFlows
-      Integer execId = submittedFlows.get((Future<?>) task);
+      final Integer execId = this.submittedFlows.get((Future<?>) task);
       if (execId != null) {
         runningFlowIds.add(execId);
       } else {
@@ -835,11 +646,11 @@ public class FlowRunnerManager implements EventListener,
   }
 
   public String getQueuedFlowIds() {
-    List<Integer> flowIdList =
-        new ArrayList<Integer>(executorService.getQueue().size());
+    final List<Integer> flowIdList =
+        new ArrayList<>(this.executorService.getQueue().size());
 
-    for (Runnable task : executorService.getQueue()) {
-      Integer execId = submittedFlows.get(task);
+    for (final Runnable task : this.executorService.getQueue()) {
+      final Integer execId = this.submittedFlows.get(task);
       if (execId != null) {
         flowIdList.add(execId);
       } else {
@@ -852,28 +663,28 @@ public class FlowRunnerManager implements EventListener,
   }
 
   public int getMaxNumRunningFlows() {
-    return numThreads;
+    return this.numThreads;
   }
 
   public int getTheadPoolQueueSize() {
-    return threadPoolQueueSize;
+    return this.threadPoolQueueSize;
   }
 
   public void reloadJobTypePlugins() throws JobTypeManagerException {
-    jobtypeManager.loadPlugins();
+    this.jobtypeManager.loadPlugins();
   }
 
   public int getTotalNumExecutedFlows() {
-    return executorService.getTotalTasks();
+    return this.executorService.getTotalTasks();
   }
 
   @Override
-  public void beforeExecute(Runnable r) {
+  public void beforeExecute(final Runnable r) {
   }
 
   @Override
-  public void afterExecute(Runnable r) {
-    submittedFlows.remove(r);
+  public void afterExecute(final Runnable r) {
+    this.submittedFlows.remove(r);
   }
 
   /**
@@ -881,13 +692,13 @@ public class FlowRunnerManager implements EventListener,
    */
   public void shutdown() {
     logger.warn("Shutting down FlowRunnerManager...");
-    executorService.shutdown();
+    this.executorService.shutdown();
     boolean result = false;
     while (!result) {
       logger.info("Awaiting Shutdown. # of executing flows: " + getNumRunningFlows());
       try {
-        result = executorService.awaitTermination(1, TimeUnit.MINUTES);
-      } catch (InterruptedException e) {
+        result = this.executorService.awaitTermination(1, TimeUnit.MINUTES);
+      } catch (final InterruptedException e) {
         logger.error(e);
       }
     }
@@ -900,8 +711,207 @@ public class FlowRunnerManager implements EventListener,
    */
   public void shutdownNow() {
     logger.warn("Shutting down FlowRunnerManager now...");
-    executorService.shutdownNow();
-    triggerManager.shutdown();
+    this.executorService.shutdownNow();
+    this.triggerManager.shutdown();
+  }
+
+  private class CleanerThread extends Thread {
+
+    // Every hour, clean execution dir.
+    private static final long EXECUTION_DIR_CLEAN_INTERVAL_MS = 60 * 60 * 1000;
+    // Every 5 mins clean the old project dir
+    private static final long OLD_PROJECT_DIR_INTERVAL_MS = 5 * 60 * 1000;
+    // Every 2 mins clean the recently finished list
+    private static final long RECENTLY_FINISHED_INTERVAL_MS = 2 * 60 * 1000;
+
+    // Every 5 mins kill flows running longer than allowed max running time
+    private static final long LONG_RUNNING_FLOW_KILLING_INTERVAL_MS = 5 * 60 * 1000;
+    private final long flowMaxRunningTimeInMins = FlowRunnerManager.this.azkabanProps.getInt(
+        Constants.ConfigurationKeys.AZKABAN_MAX_FLOW_RUNNING_MINS, -1);
+    private boolean shutdown = false;
+    private long lastExecutionDirCleanTime = -1;
+    private long lastOldProjectCleanTime = -1;
+    private long lastRecentlyFinishedCleanTime = -1;
+    private long lastLongRunningFlowCleanTime = -1;
+
+    public CleanerThread() {
+      this.setName("FlowRunnerManager-Cleaner-Thread");
+      setDaemon(true);
+    }
+
+    public void shutdown() {
+      this.shutdown = true;
+      this.interrupt();
+    }
+
+    private boolean isFlowRunningLongerThan(final ExecutableFlow flow,
+        final long flowMaxRunningTimeInMins) {
+      final Set<Status> nonFinishingStatusAfterFlowStarts = new HashSet<>(
+          Arrays.asList(Status.RUNNING, Status.QUEUED, Status.PAUSED, Status.FAILED_FINISHING));
+      return nonFinishingStatusAfterFlowStarts.contains(flow.getStatus()) && flow.getStartTime() > 0
+          && TimeUnit.MILLISECONDS.toMinutes(System.currentTimeMillis() - flow.getStartTime())
+          >= flowMaxRunningTimeInMins;
+    }
+
+    @Override
+    public void run() {
+      while (!this.shutdown) {
+        synchronized (this) {
+          try {
+            FlowRunnerManager.this.lastCleanerThreadCheckTime = System.currentTimeMillis();
+            logger.info("# of executing flows: " + getNumRunningFlows());
+
+            // Cleanup old stuff.
+            final long currentTime = System.currentTimeMillis();
+            if (currentTime - RECENTLY_FINISHED_INTERVAL_MS > this.lastRecentlyFinishedCleanTime) {
+              logger.info("Cleaning recently finished");
+              cleanRecentlyFinished();
+              this.lastRecentlyFinishedCleanTime = currentTime;
+            }
+
+            if (currentTime - OLD_PROJECT_DIR_INTERVAL_MS > this.lastOldProjectCleanTime
+                && FlowRunnerManager.this.isExecutorActive) {
+              logger.info("Cleaning old projects");
+              cleanOlderProjects();
+              this.lastOldProjectCleanTime = currentTime;
+            }
+
+            if (currentTime - EXECUTION_DIR_CLEAN_INTERVAL_MS > this.lastExecutionDirCleanTime) {
+              logger.info("Cleaning old execution dirs");
+              cleanOlderExecutionDirs();
+              this.lastExecutionDirCleanTime = currentTime;
+            }
+
+            if (this.flowMaxRunningTimeInMins > 0
+                && currentTime - LONG_RUNNING_FLOW_KILLING_INTERVAL_MS
+                > this.lastLongRunningFlowCleanTime) {
+              logger.info(String.format("Killing long jobs running longer than %s mins",
+                  this.flowMaxRunningTimeInMins));
+              for (final FlowRunner flowRunner : FlowRunnerManager.this.runningFlows.values()) {
+                if (isFlowRunningLongerThan(flowRunner.getExecutableFlow(),
+                    this.flowMaxRunningTimeInMins)) {
+                  logger.info(String
+                      .format("Killing job [id: %s, status: %s]. It has been running for %s mins",
+                          flowRunner.getExecutableFlow().getId(),
+                          flowRunner.getExecutableFlow().getStatus(), TimeUnit.MILLISECONDS
+                              .toMinutes(System.currentTimeMillis() - flowRunner.getExecutableFlow()
+                                  .getStartTime())));
+                  flowRunner.kill();
+                }
+              }
+              this.lastLongRunningFlowCleanTime = currentTime;
+            }
+
+            wait(RECENTLY_FINISHED_TIME_TO_LIVE);
+          } catch (final InterruptedException e) {
+            logger.info("Interrupted. Probably to shut down.");
+          } catch (final Throwable t) {
+            logger.warn(
+                "Uncaught throwable, please look into why it is not caught", t);
+          }
+        }
+      }
+    }
+
+    private void cleanOlderExecutionDirs() {
+      final File dir = FlowRunnerManager.this.executionDirectory;
+
+      final long pastTimeThreshold =
+          System.currentTimeMillis() - FlowRunnerManager.this.executionDirRetention;
+      final File[] executionDirs = dir
+          .listFiles(path -> path.isDirectory() && path.lastModified() < pastTimeThreshold);
+
+      for (final File exDir : executionDirs) {
+        try {
+          final int execId = Integer.valueOf(exDir.getName());
+          if (FlowRunnerManager.this.runningFlows.containsKey(execId)
+              || FlowRunnerManager.this.recentlyFinishedFlows.containsKey(execId)) {
+            continue;
+          }
+        } catch (final NumberFormatException e) {
+          logger.error("Can't delete exec dir " + exDir.getName()
+              + " it is not a number");
+          continue;
+        }
+
+        synchronized (FlowRunnerManager.this.executionDirDeletionSync) {
+          try {
+            FileUtils.deleteDirectory(exDir);
+          } catch (final IOException e) {
+            logger.error("Error cleaning execution dir " + exDir.getPath(), e);
+          }
+        }
+      }
+    }
+
+    private void cleanRecentlyFinished() {
+      final long cleanupThreshold =
+          System.currentTimeMillis() - RECENTLY_FINISHED_TIME_TO_LIVE;
+      final ArrayList<Integer> executionToKill = new ArrayList<>();
+      for (final ExecutableFlow flow : FlowRunnerManager.this.recentlyFinishedFlows.values()) {
+        if (flow.getEndTime() < cleanupThreshold) {
+          executionToKill.add(flow.getExecutionId());
+        }
+      }
+
+      for (final Integer id : executionToKill) {
+        logger.info("Cleaning execution " + id
+            + " from recently finished flows list.");
+        FlowRunnerManager.this.recentlyFinishedFlows.remove(id);
+      }
+    }
+
+    private void cleanOlderProjects() {
+      final Map<Integer, ArrayList<ProjectVersion>> projectVersions =
+          new HashMap<>();
+      for (final ProjectVersion version : FlowRunnerManager.this.installedProjects.values()) {
+        ArrayList<ProjectVersion> versionList =
+            projectVersions.get(version.getProjectId());
+        if (versionList == null) {
+          versionList = new ArrayList<>();
+          projectVersions.put(version.getProjectId(), versionList);
+        }
+        versionList.add(version);
+      }
+
+      final HashSet<Pair<Integer, Integer>> activeProjectVersions =
+          new HashSet<>();
+      for (final FlowRunner runner : FlowRunnerManager.this.runningFlows.values()) {
+        final ExecutableFlow flow = runner.getExecutableFlow();
+        activeProjectVersions.add(new Pair<>(flow
+            .getProjectId(), flow.getVersion()));
+      }
+
+      for (final Map.Entry<Integer, ArrayList<ProjectVersion>> entry : projectVersions
+          .entrySet()) {
+        // Integer projectId = entry.getKey();
+        final ArrayList<ProjectVersion> installedVersions = entry.getValue();
+
+        // Keep one version of the project around.
+        if (installedVersions.size() == 1) {
+          continue;
+        }
+
+        Collections.sort(installedVersions);
+        for (int i = 0; i < installedVersions.size() - 1; ++i) {
+          final ProjectVersion version = installedVersions.get(i);
+          final Pair<Integer, Integer> versionKey =
+              new Pair<>(version.getProjectId(),
+                  version.getVersion());
+          if (!activeProjectVersions.contains(versionKey)) {
+            try {
+              logger.info("Removing old unused installed project "
+                  + version.getProjectId() + ":" + version.getVersion());
+              deleteDirectory(version);
+              FlowRunnerManager.this.installedProjects.remove(new Pair<>(version
+                  .getProjectId(), version.getVersion()));
+            } catch (final IOException e) {
+              logger.error(e);
+            }
+          }
+        }
+      }
+    }
   }
 
 }
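
A note for readers of the new CleanerThread above: it is a single daemon thread that wakes up on a fixed period and runs each maintenance task (trimming the recently-finished list, removing old project installs, deleting old execution directories, killing flows that exceed the configured max running time) only once that task's own interval has elapsed, and it deletes execution directories under the same executionDirDeletionSync lock that the log, attachment, and metadata readers take, so a directory is never removed mid-read. Below is a minimal standalone sketch of that interval-gated loop; the class name, task names, intervals, and wake-up period are illustrative only, not Azkaban's.

// Sketch of an interval-gated cleanup loop (hypothetical names, not Azkaban code).
public class IntervalCleaner extends Thread {

  private static final long DIR_CLEAN_INTERVAL_MS = 60 * 60 * 1000;  // hourly task
  private static final long LIST_CLEAN_INTERVAL_MS = 2 * 60 * 1000;  // 2-minute task
  private static final long WAKEUP_MS = 60 * 1000;                   // loop period

  private volatile boolean shutdown = false;
  private long lastDirClean = -1;
  private long lastListClean = -1;

  public IntervalCleaner() {
    setName("interval-cleaner");
    setDaemon(true);
  }

  public void shutdownCleaner() {
    this.shutdown = true;
    this.interrupt();  // wake the thread out of wait() so it can exit promptly
  }

  @Override
  public void run() {
    while (!this.shutdown) {
      synchronized (this) {
        try {
          final long now = System.currentTimeMillis();
          // Each task keeps its own "last run" timestamp, so tasks with
          // different periods can share one thread and one wait().
          if (now - DIR_CLEAN_INTERVAL_MS > this.lastDirClean) {
            cleanDirs();
            this.lastDirClean = now;
          }
          if (now - LIST_CLEAN_INTERVAL_MS > this.lastListClean) {
            cleanFinishedList();
            this.lastListClean = now;
          }
          wait(WAKEUP_MS);
        } catch (final InterruptedException e) {
          // Interrupted by shutdownCleaner(); the loop condition ends the thread.
        }
      }
    }
  }

  private void cleanDirs() { /* placeholder for the hourly task */ }

  private void cleanFinishedList() { /* placeholder for the 2-minute task */ }

  public static void main(final String[] args) throws InterruptedException {
    final IntervalCleaner cleaner = new IntervalCleaner();
    cleaner.start();
    Thread.sleep(2000);        // let it make a pass or two
    cleaner.shutdownCleaner();
    cleaner.join();
  }
}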
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManager.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManager.java
index 3437065..5664487 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManager.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManager.java
@@ -19,64 +19,65 @@ package azkaban.execapp.jmx;
 import azkaban.execapp.FlowRunnerManager;
 
 public class JmxFlowRunnerManager implements JmxFlowRunnerManagerMBean {
-  private FlowRunnerManager manager;
 
-  public JmxFlowRunnerManager(FlowRunnerManager manager) {
+  private final FlowRunnerManager manager;
+
+  public JmxFlowRunnerManager(final FlowRunnerManager manager) {
     this.manager = manager;
   }
 
   @Override
   public long getLastCleanerThreadCheckTime() {
-    return manager.getLastCleanerThreadCheckTime();
+    return this.manager.getLastCleanerThreadCheckTime();
   }
 
   @Override
   public boolean isCleanerThreadActive() {
-    return manager.isCleanerThreadActive();
+    return this.manager.isCleanerThreadActive();
   }
 
   @Override
   public String getCleanerThreadState() {
-    return manager.getCleanerThreadState().toString();
+    return this.manager.getCleanerThreadState().toString();
   }
 
   @Override
   public boolean isExecutorThreadPoolShutdown() {
-    return manager.isExecutorThreadPoolShutdown();
+    return this.manager.isExecutorThreadPoolShutdown();
   }
 
   @Override
   public int getNumRunningFlows() {
-    return manager.getNumRunningFlows();
+    return this.manager.getNumRunningFlows();
   }
 
   @Override
   public int getNumQueuedFlows() {
-    return manager.getNumQueuedFlows();
+    return this.manager.getNumQueuedFlows();
   }
 
   @Override
   public String getRunningFlows() {
-    return manager.getRunningFlowIds();
+    return this.manager.getRunningFlowIds();
   }
 
   @Override
   public String getQueuedFlows() {
-    return manager.getQueuedFlowIds();
+    return this.manager.getQueuedFlowIds();
   }
 
   @Override
   public int getMaxNumRunningFlows() {
-    return manager.getMaxNumRunningFlows();
+    return this.manager.getMaxNumRunningFlows();
   }
 
   @Override
   public int getMaxQueuedFlows() {
-    return manager.getTheadPoolQueueSize();
+    return this.manager.getTheadPoolQueueSize();
   }
 
   @Override
   public int getTotalNumExecutedFlows() {
-    return manager.getTotalNumExecutedFlows();
+    return this.manager.getTotalNumExecutedFlows();
   }
 }
\ No newline at end of file
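
For context on how a delegating MBean such as JmxFlowRunnerManager becomes visible to JMX clients (a sketch under assumptions, not code from this patch): the wrapper is constructed around the object it reads from and registered with the platform MBeanServer under an ObjectName; Azkaban performs the actual registration elsewhere in the executor server's startup. The class names and the ObjectName string below are hypothetical.

// Minimal standard-MBean registration sketch (hypothetical names).
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;

public class MBeanRegistrationSketch {

  // Standard MBean pattern: the interface must be named <ImplementationClass>MBean.
  public interface SampleMBean {

    int getNumRunningFlows();
  }

  public static class Sample implements SampleMBean {

    @Override
    public int getNumRunningFlows() {
      return 0;  // a real wrapper would delegate, e.g. to a FlowRunnerManager
    }
  }

  public static void main(final String[] args) throws Exception {
    final MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    final ObjectName name = new ObjectName("azkaban.execapp:type=Sample");  // illustrative name
    server.registerMBean(new Sample(), name);
    System.out.println("Registered " + name
        + "; attributes are now readable via JConsole or the JMX servlet.");
  }
}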
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManagerMBean.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManagerMBean.java
index 41f8f04..3cd4df9 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManagerMBean.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxFlowRunnerManagerMBean.java
@@ -19,6 +19,7 @@ package azkaban.execapp.jmx;
 import azkaban.jmx.DisplayName;
 
 public interface JmxFlowRunnerManagerMBean {
+
   @DisplayName("OPERATION: getLastCleanerThreadCheckTime")
   public long getLastCleanerThreadCheckTime();
 
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallback.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallback.java
index 0b3a11d..01d15bb 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallback.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallback.java
@@ -4,30 +4,30 @@ import org.apache.http.impl.client.FutureRequestExecutionMetrics;
 
 public class JmxJobCallback implements JmxJobCallbackMBean {
 
-  private FutureRequestExecutionMetrics jobCallbackMetrics;
+  private final FutureRequestExecutionMetrics jobCallbackMetrics;
 
-  public JmxJobCallback(FutureRequestExecutionMetrics jobCallbackMetrics) {
+  public JmxJobCallback(final FutureRequestExecutionMetrics jobCallbackMetrics) {
     this.jobCallbackMetrics = jobCallbackMetrics;
   }
 
   @Override
   public long getNumJobCallbacks() {
-    return jobCallbackMetrics.getRequestCount();
+    return this.jobCallbackMetrics.getRequestCount();
   }
 
   @Override
   public long getNumSuccessfulJobCallbacks() {
-    return jobCallbackMetrics.getSuccessfulConnectionCount();
+    return this.jobCallbackMetrics.getSuccessfulConnectionCount();
   }
 
   @Override
   public long getNumFailedJobCallbacks() {
-    return jobCallbackMetrics.getFailedConnectionCount();
+    return this.jobCallbackMetrics.getFailedConnectionCount();
   }
 
   @Override
   public long getNumActiveJobCallbacks() {
-    return jobCallbackMetrics.getActiveConnectionCount();
+    return this.jobCallbackMetrics.getActiveConnectionCount();
   }
 
 }
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallbackMBean.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallbackMBean.java
index 4b9d27d..9b70300 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallbackMBean.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobCallbackMBean.java
@@ -3,6 +3,7 @@ package azkaban.execapp.jmx;
 import azkaban.jmx.DisplayName;
 
 public interface JmxJobCallbackMBean {
+
   @DisplayName("OPERATION: getNumJobCallbacks")
   public long getNumJobCallbacks();
 
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMBeanManager.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMBeanManager.java
index 7b98603..9fdd7a8 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMBeanManager.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMBeanManager.java
@@ -1,11 +1,5 @@
 package azkaban.execapp.jmx;
 
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.log4j.Logger;
-
 import azkaban.event.Event;
 import azkaban.event.EventData;
 import azkaban.event.EventListener;
@@ -13,31 +7,34 @@ import azkaban.execapp.JobRunner;
 import azkaban.executor.ExecutableNode;
 import azkaban.executor.Status;
 import azkaban.utils.Props;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.log4j.Logger;
 
 /**
  * Responsible for keeping track of job related MBean attributes through listening
  * to job related events.
- * 
- * @author hluu
  *
+ * @author hluu
  */
 public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
 
   private static final Logger logger = Logger
       .getLogger(JmxJobMBeanManager.class);
 
-  private static JmxJobMBeanManager INSTANCE = new JmxJobMBeanManager();
+  private static final JmxJobMBeanManager INSTANCE = new JmxJobMBeanManager();
 
-  private AtomicInteger runningJobCount = new AtomicInteger(0);
-  private AtomicInteger totalExecutedJobCount = new AtomicInteger(0);
-  private AtomicInteger totalFailedJobCount = new AtomicInteger(0);
-  private AtomicInteger totalSucceededJobCount = new AtomicInteger(0);
+  private final AtomicInteger runningJobCount = new AtomicInteger(0);
+  private final AtomicInteger totalExecutedJobCount = new AtomicInteger(0);
+  private final AtomicInteger totalFailedJobCount = new AtomicInteger(0);
+  private final AtomicInteger totalSucceededJobCount = new AtomicInteger(0);
 
-  private Map<String, AtomicInteger> jobTypeFailureMap =
-      new HashMap<String, AtomicInteger>();
+  private final Map<String, AtomicInteger> jobTypeFailureMap =
+      new HashMap<>();
 
-  private Map<String, AtomicInteger> jobTypeSucceededMap =
-      new HashMap<String, AtomicInteger>();
+  private final Map<String, AtomicInteger> jobTypeSucceededMap =
+      new HashMap<>();
 
   private boolean initialized;
 
@@ -48,46 +45,46 @@ public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
     return INSTANCE;
   }
 
-  public void initialize(Props props) {
+  public void initialize(final Props props) {
     logger.info("Initializing " + getClass().getName());
-    initialized = true;
+    this.initialized = true;
   }
 
   @Override
   public int getNumRunningJobs() {
-    return runningJobCount.get();
+    return this.runningJobCount.get();
   }
 
   @Override
   public int getTotalNumExecutedJobs() {
-    return totalExecutedJobCount.get();
+    return this.totalExecutedJobCount.get();
   }
 
   @Override
   public int getTotalFailedJobs() {
-    return totalFailedJobCount.get();
+    return this.totalFailedJobCount.get();
   }
 
   @Override
   public int getTotalSucceededJobs() {
-    return totalSucceededJobCount.get();
+    return this.totalSucceededJobCount.get();
   }
 
   @Override
   public Map<String, Integer> getTotalSucceededJobsByJobType() {
-    return convertMapValueToInteger(jobTypeSucceededMap);
+    return convertMapValueToInteger(this.jobTypeSucceededMap);
   }
 
   @Override
   public Map<String, Integer> getTotalFailedJobsByJobType() {
-    return convertMapValueToInteger(jobTypeFailureMap);
+    return convertMapValueToInteger(this.jobTypeFailureMap);
   }
 
   private Map<String, Integer> convertMapValueToInteger(
-      Map<String, AtomicInteger> map) {
-    Map<String, Integer> result = new HashMap<String, Integer>(map.size());
+      final Map<String, AtomicInteger> map) {
+    final Map<String, Integer> result = new HashMap<>(map.size());
 
-    for (Map.Entry<String, AtomicInteger> entry : map.entrySet()) {
+    for (final Map.Entry<String, AtomicInteger> entry : map.entrySet()) {
       result.put(entry.getKey(), entry.getValue().intValue());
     }
 
@@ -95,15 +92,15 @@ public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
   }
 
   @Override
-  public void handleEvent(Event event) {
-    if (!initialized) {
+  public void handleEvent(final Event event) {
+    if (!this.initialized) {
       throw new RuntimeException("JmxJobMBeanManager has not been initialized");
     }
 
     if (event.getRunner() instanceof JobRunner) {
-      JobRunner jobRunner = (JobRunner) event.getRunner();
-      EventData eventData = event.getData();
-      ExecutableNode node = jobRunner.getNode();
+      final JobRunner jobRunner = (JobRunner) event.getRunner();
+      final EventData eventData = event.getData();
+      final ExecutableNode node = jobRunner.getNode();
 
       if (logger.isDebugEnabled()) {
         logger.debug("*** got " + event.getType() + " " + node.getId() + " "
@@ -112,11 +109,11 @@ public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
       }
 
       if (event.getType() == Event.Type.JOB_STARTED) {
-        runningJobCount.incrementAndGet();
+        this.runningJobCount.incrementAndGet();
       } else if (event.getType() == Event.Type.JOB_FINISHED) {
-        totalExecutedJobCount.incrementAndGet();
-        if (runningJobCount.intValue() > 0) {
-          runningJobCount.decrementAndGet();
+        this.totalExecutedJobCount.incrementAndGet();
+        if (this.runningJobCount.intValue() > 0) {
+          this.runningJobCount.decrementAndGet();
         } else {
           logger.warn("runningJobCount not messed up, it is already zero "
               + "and we are trying to decrement on job event "
@@ -124,9 +121,9 @@ public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
         }
 
         if (eventData.getStatus() == Status.FAILED) {
-          totalFailedJobCount.incrementAndGet();
+          this.totalFailedJobCount.incrementAndGet();
         } else if (eventData.getStatus() == Status.SUCCEEDED) {
-          totalSucceededJobCount.incrementAndGet();
+          this.totalSucceededJobCount.incrementAndGet();
         }
 
         handleJobFinishedCount(eventData.getStatus(), node.getType());
@@ -138,20 +135,20 @@ public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
     }
   }
 
-  private void handleJobFinishedCount(Status status, String jobType) {
+  private void handleJobFinishedCount(final Status status, final String jobType) {
     switch (status) {
-    case FAILED:
-      handleJobFinishedByType(jobTypeFailureMap, jobType);
-      break;
-    case SUCCEEDED:
-      handleJobFinishedByType(jobTypeSucceededMap, jobType);
-      break;
-    default:
+      case FAILED:
+        handleJobFinishedByType(this.jobTypeFailureMap, jobType);
+        break;
+      case SUCCEEDED:
+        handleJobFinishedByType(this.jobTypeSucceededMap, jobType);
+        break;
+      default:
     }
   }
 
-  private void handleJobFinishedByType(Map<String, AtomicInteger> jobTypeMap,
-      String jobType) {
+  private void handleJobFinishedByType(final Map<String, AtomicInteger> jobTypeMap,
+      final String jobType) {
 
     synchronized (jobTypeMap) {
       AtomicInteger count = jobTypeMap.get(jobType);
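
The hunk above is cut off inside handleJobFinishedByType, but the pattern it belongs to is visible in the rest of the file: a HashMap of AtomicInteger counters keyed by job type, with map access guarded by synchronizing on the map, plus a copy-out step (convertMapValueToInteger) that turns the counters into plain Integers for the MBean attributes. A self-contained sketch of that pattern, with hypothetical names:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

// Per-type counter sketch (hypothetical names, not Azkaban code).
public class TypeCounterSketch {

  private final Map<String, AtomicInteger> countsByType = new HashMap<>();

  public void increment(final String type) {
    synchronized (this.countsByType) {
      AtomicInteger count = this.countsByType.get(type);
      if (count == null) {
        // First event for this type: create the counter under the lock.
        count = new AtomicInteger(0);
        this.countsByType.put(type, count);
      }
      count.incrementAndGet();
    }
  }

  // Snapshot as plain Integers, mirroring convertMapValueToInteger(...) above.
  public Map<String, Integer> snapshot() {
    synchronized (this.countsByType) {
      final Map<String, Integer> result = new HashMap<>(this.countsByType.size());
      for (final Map.Entry<String, AtomicInteger> entry : this.countsByType.entrySet()) {
        result.put(entry.getKey(), entry.getValue().intValue());
      }
      return result;
    }
  }

  public static void main(final String[] args) {
    final TypeCounterSketch counters = new TypeCounterSketch();
    counters.increment("command");
    counters.increment("command");
    counters.increment("hadoopJava");
    System.out.println(counters.snapshot());  // e.g. {hadoopJava=1, command=2}
  }
}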
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMXBean.java b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMXBean.java
index 9940188..a0abadb 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMXBean.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/jmx/JmxJobMXBean.java
@@ -1,14 +1,12 @@
 package azkaban.execapp.jmx;
 
-import java.util.Map;
-
 import azkaban.jmx.DisplayName;
+import java.util.Map;
 
 /**
  * Define all the MBean attributes at the job level
- * 
- * @author hluu
  *
+ * @author hluu
  */
 public interface JmxJobMXBean {
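
A side note on the MXBean suffix in JmxJobMXBean (general JMX behaviour, not something this patch changes): an interface whose name ends in MXBean is registered as an MXBean, so attribute types such as Map<String, Integer> are converted to open types (TabularData) that generic JMX clients can read without Azkaban classes on their classpath. A tiny illustration with hypothetical names:

import java.lang.management.ManagementFactory;
import java.util.HashMap;
import java.util.Map;
import javax.management.ObjectName;

public class MXBeanSketch {

  // The MXBean suffix (or an @MXBean annotation) is what opts into open-type conversion.
  public interface CounterMXBean {

    Map<String, Integer> getCounts();
  }

  public static class Counter implements CounterMXBean {

    @Override
    public Map<String, Integer> getCounts() {
      final Map<String, Integer> counts = new HashMap<>();
      counts.put("example", 1);
      return counts;
    }
  }

  public static void main(final String[] args) throws Exception {
    // The ObjectName below is illustrative only.
    ManagementFactory.getPlatformMBeanServer()
        .registerMBean(new Counter(), new ObjectName("sketch:type=Counter"));
    System.out.println("Counter registered; getCounts() is exposed to clients as TabularData.");
  }
}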
 
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/JMXHttpServlet.java b/azkaban-exec-server/src/main/java/azkaban/execapp/JMXHttpServlet.java
index b7e4df2..0cec41f 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/JMXHttpServlet.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/JMXHttpServlet.java
@@ -16,11 +16,14 @@
 
 package azkaban.execapp;
 
+import azkaban.Constants;
+import azkaban.executor.ConnectorParams;
+import azkaban.server.HttpRequestUtils;
+import azkaban.utils.JSONUtils;
 import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.TreeMap;
-
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
 import javax.management.ObjectName;
@@ -29,61 +32,56 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.log4j.Logger;
 
-import azkaban.Constants;
-import azkaban.executor.ConnectorParams;
-import azkaban.server.HttpRequestUtils;
-import azkaban.utils.JSONUtils;
-
 public class JMXHttpServlet extends HttpServlet implements ConnectorParams {
+
   private static final long serialVersionUID = -3085603824826446270L;
   private static final Logger logger = Logger.getLogger(JMXHttpServlet.class);
   private AzkabanExecutorServer server;
 
   @Override
-  public void init(ServletConfig config) throws ServletException {
-    server =
+  public void init(final ServletConfig config) throws ServletException {
+    this.server =
         (AzkabanExecutorServer) config.getServletContext().getAttribute(
             Constants.AZKABAN_SERVLET_CONTEXT_KEY);
   }
 
-  public boolean hasParam(HttpServletRequest request, String param) {
+  public boolean hasParam(final HttpServletRequest request, final String param) {
     return HttpRequestUtils.hasParam(request, param);
   }
 
-  public String getParam(HttpServletRequest request, String name)
+  public String getParam(final HttpServletRequest request, final String name)
       throws ServletException {
     return HttpRequestUtils.getParam(request, name);
   }
 
   @Override
-  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
       throws ServletException, IOException {
-    Map<String, Object> ret = new HashMap<String, Object>();
+    final Map<String, Object> ret = new HashMap<>();
 
     if (hasParam(req, JMX_GET_MBEANS)) {
-      ret.put("mbeans", server.getMbeanNames());
+      ret.put("mbeans", this.server.getMbeanNames());
     } else if (hasParam(req, JMX_GET_ALL_MBEAN_ATTRIBUTES)) {
       if (!hasParam(req, JMX_MBEAN)) {
         ret.put("error", "Parameters 'mbean' must be set");
       } else {
-        String mbeanName = getParam(req, JMX_MBEAN);
+        final String mbeanName = getParam(req, JMX_MBEAN);
         try {
-          ObjectName name = new ObjectName(mbeanName);
-          MBeanInfo info = server.getMBeanInfo(name);
+          final ObjectName name = new ObjectName(mbeanName);
+          final MBeanInfo info = this.server.getMBeanInfo(name);
 
-          MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
-          Map<String, Object> attributes = new TreeMap<String, Object>();
+          final MBeanAttributeInfo[] mbeanAttrs = info.getAttributes();
+          final Map<String, Object> attributes = new TreeMap<>();
 
-          for (MBeanAttributeInfo attrInfo : mbeanAttrs) {
-            Object obj = server.getMBeanAttribute(name, attrInfo.getName());
+          for (final MBeanAttributeInfo attrInfo : mbeanAttrs) {
+            final Object obj = this.server.getMBeanAttribute(name, attrInfo.getName());
             attributes.put(attrInfo.getName(), obj);
           }
 
           ret.put("attributes", attributes);
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error(e);
           ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
         }
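
The servlet above answers JMX queries over HTTP: JMX_GET_MBEANS lists the registered MBean names, and JMX_GET_ALL_MBEAN_ATTRIBUTES together with JMX_MBEAN returns every attribute of one MBean as a JSON map. A hedged client sketch follows; the host, port, path, and the literal parameter strings are assumptions standing in for the ConnectorParams constants, so check ConnectorParams for the authoritative values before relying on them.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

// Hypothetical client for the JMX servlet (endpoint and parameter names assumed).
public class JmxServletClientSketch {

  public static void main(final String[] args) throws Exception {
    final String base = "http://localhost:12321/jmx";  // assumed executor host, port and path
    final String mbean = "java.lang:type=Memory";      // any registered ObjectName works
    final URL url = new URL(base + "?getAllMBeanAttributes=true&mBean="
        + URLEncoder.encode(mbean, StandardCharsets.UTF_8.name()));

    final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
      String line;
      while ((line = in.readLine()) != null) {
        System.out.println(line);  // JSON map of attribute name -> value
      }
    } finally {
      conn.disconnect();
    }
  }
}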
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/JobRunner.java b/azkaban-exec-server/src/main/java/azkaban/execapp/JobRunner.java
index 3d95c3e..f19b91f 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/JobRunner.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/JobRunner.java
@@ -17,27 +17,6 @@
 package azkaban.execapp;
 
 import azkaban.Constants;
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.Optional;
-
-import org.apache.log4j.Appender;
-import org.apache.log4j.EnhancedPatternLayout;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Layout;
-import org.apache.log4j.Logger;
-import org.apache.log4j.RollingFileAppender;
-
-import org.apache.kafka.log4jappender.KafkaLog4jAppender;
-
-import org.json.simple.JSONObject;
-
 import azkaban.event.Event;
 import azkaban.event.Event.Type;
 import azkaban.event.EventData;
@@ -56,46 +35,55 @@ import azkaban.jobExecutor.Job;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.jobtype.JobTypeManagerException;
 import azkaban.utils.ExternalLinkUtils;
+import azkaban.utils.PatternLayoutEscaped;
 import azkaban.utils.Props;
 import azkaban.utils.StringUtils;
 import azkaban.utils.UndefinedPropertyException;
-import azkaban.utils.PatternLayoutEscaped;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Optional;
+import java.util.Set;
+import org.apache.kafka.log4jappender.KafkaLog4jAppender;
+import org.apache.log4j.Appender;
+import org.apache.log4j.EnhancedPatternLayout;
+import org.apache.log4j.FileAppender;
+import org.apache.log4j.Layout;
+import org.apache.log4j.Logger;
+import org.apache.log4j.RollingFileAppender;
+import org.json.simple.JSONObject;
 
 public class JobRunner extends EventHandler implements Runnable {
-  public static final String AZKABAN_WEBSERVER_URL = "azkaban.webserver.url";
 
+  public static final String AZKABAN_WEBSERVER_URL = "azkaban.webserver.url";
+  private static final Object logCreatorLock = new Object();
   private final Layout DEFAULT_LAYOUT = new EnhancedPatternLayout(
       "%d{dd-MM-yyyy HH:mm:ss z} %c{1} %p - %m\n");
-
-  private ExecutorLoader loader;
-  private Props props;
-  private Props azkabanProps;
-  private ExecutableNode node;
-  private File workingDir;
-
+  private final Object syncObject = new Object();
+  private final JobTypeManager jobtypeManager;
+  private final ExecutorLoader loader;
+  private final Props props;
+  private final Props azkabanProps;
+  private final ExecutableNode node;
+  private final File workingDir;
+  private final Layout loggerLayout = this.DEFAULT_LAYOUT;
+  private final String jobId;
+  private final Set<String> pipelineJobs = new HashSet<>();
   private Logger logger = null;
-  private Layout loggerLayout = DEFAULT_LAYOUT;
   private Logger flowLogger = null;
-
   private Appender jobAppender = null;
   private Optional<Appender> kafkaAppender = Optional.empty();
   private File logFile;
   private String attachmentFileName;
-
   private Job job;
   private int executionId = -1;
-  private String jobId;
-
-  private static final Object logCreatorLock = new Object();
-  private final Object syncObject = new Object();
-
-  private final JobTypeManager jobtypeManager;
-
   // Used by the job to watch and block against another flow
   private Integer pipelineLevel = null;
   private FlowWatcher watcher = null;
-  private Set<String> pipelineJobs = new HashSet<String>();
-
   private Set<String> proxyUsers = null;
 
   private String jobLogChunkSize;
@@ -105,8 +93,8 @@ public class JobRunner extends EventHandler implements Runnable {
   private boolean killed = false;
   private BlockingStatus currentBlockStatus = null;
 
-  public JobRunner(ExecutableNode node, File workingDir, ExecutorLoader loader,
-      JobTypeManager jobtypeManager, Props azkabanProps) {
+  public JobRunner(final ExecutableNode node, final File workingDir, final ExecutorLoader loader,
+      final JobTypeManager jobtypeManager, final Props azkabanProps) {
     this.props = node.getInputProps();
     this.node = node;
     this.workingDir = workingDir;
@@ -118,69 +106,117 @@ public class JobRunner extends EventHandler implements Runnable {
     this.azkabanProps = azkabanProps;
   }
 
-  public void setValidatedProxyUsers(Set<String> proxyUsers) {
+  public static String createLogFileName(final ExecutableNode node, final int attempt) {
+    final int executionId = node.getExecutableFlow().getExecutionId();
+    String jobId = node.getId();
+    if (node.getExecutableFlow() != node.getParentFlow()) {
+      // Posix safe file delimiter
+      jobId = node.getPrintableId("._.");
+    }
+    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+        + ".log" : "_job." + executionId + "." + jobId + ".log";
+  }
+
+  public static String createLogFileName(final ExecutableNode node) {
+    return JobRunner.createLogFileName(node, node.getAttempt());
+  }
+
+  public static String createMetaDataFileName(final ExecutableNode node, final int attempt) {
+    final int executionId = node.getExecutableFlow().getExecutionId();
+    String jobId = node.getId();
+    if (node.getExecutableFlow() != node.getParentFlow()) {
+      // Posix safe file delimiter
+      jobId = node.getPrintableId("._.");
+    }
+
+    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+        + ".meta" : "_job." + executionId + "." + jobId + ".meta";
+  }
+
+  public static String createMetaDataFileName(final ExecutableNode node) {
+    return JobRunner.createMetaDataFileName(node, node.getAttempt());
+  }
+
+  public static String createAttachmentFileName(final ExecutableNode node) {
+
+    return JobRunner.createAttachmentFileName(node, node.getAttempt());
+  }
+
+  public static String createAttachmentFileName(final ExecutableNode node, final int attempt) {
+    final int executionId = node.getExecutableFlow().getExecutionId();
+    String jobId = node.getId();
+    if (node.getExecutableFlow() != node.getParentFlow()) {
+      // Posix safe file delimiter
+      jobId = node.getPrintableId("._.");
+    }
+
+    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
+        + ".attach" : "_job." + executionId + "." + jobId + ".attach";
+  }
+
+  public void setValidatedProxyUsers(final Set<String> proxyUsers) {
     this.proxyUsers = proxyUsers;
   }
 
-  public void setLogSettings(Logger flowLogger, String logFileChuckSize,
-      int numLogBackup) {
+  public void setLogSettings(final Logger flowLogger, final String logFileChuckSize,
+      final int numLogBackup) {
     this.flowLogger = flowLogger;
     this.jobLogChunkSize = logFileChuckSize;
     this.jobLogBackupIndex = numLogBackup;
   }
 
   public Props getProps() {
-    return props;
+    return this.props;
   }
 
-  public void setPipeline(FlowWatcher watcher, int pipelineLevel) {
+  public void setPipeline(final FlowWatcher watcher, final int pipelineLevel) {
     this.watcher = watcher;
     this.pipelineLevel = pipelineLevel;
 
     if (this.pipelineLevel == 1) {
-      pipelineJobs.add(node.getNestedId());
+      this.pipelineJobs.add(this.node.getNestedId());
     } else if (this.pipelineLevel == 2) {
-      pipelineJobs.add(node.getNestedId());
-      ExecutableFlowBase parentFlow = node.getParentFlow();
+      this.pipelineJobs.add(this.node.getNestedId());
+      final ExecutableFlowBase parentFlow = this.node.getParentFlow();
 
-      if (parentFlow.getEndNodes().contains(node.getId())) {
+      if (parentFlow.getEndNodes().contains(this.node.getId())) {
         if (!parentFlow.getOutNodes().isEmpty()) {
-          ExecutableFlowBase grandParentFlow = parentFlow.getParentFlow();
-          for (String outNode : parentFlow.getOutNodes()) {
-            ExecutableNode nextNode =
+          final ExecutableFlowBase grandParentFlow = parentFlow.getParentFlow();
+          for (final String outNode : parentFlow.getOutNodes()) {
+            final ExecutableNode nextNode =
                 grandParentFlow.getExecutableNode(outNode);
 
             // If the next node is a nested flow, then we add the nested
             // starting nodes
             if (nextNode instanceof ExecutableFlowBase) {
-              ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
-              findAllStartingNodes(nextFlow, pipelineJobs);
+              final ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
+              findAllStartingNodes(nextFlow, this.pipelineJobs);
             } else {
-              pipelineJobs.add(nextNode.getNestedId());
+              this.pipelineJobs.add(nextNode.getNestedId());
             }
           }
         }
       } else {
-        for (String outNode : node.getOutNodes()) {
-          ExecutableNode nextNode = parentFlow.getExecutableNode(outNode);
+        for (final String outNode : this.node.getOutNodes()) {
+          final ExecutableNode nextNode = parentFlow.getExecutableNode(outNode);
 
           // If the next node is a nested flow, then we add the nested starting
           // nodes
           if (nextNode instanceof ExecutableFlowBase) {
-            ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
-            findAllStartingNodes(nextFlow, pipelineJobs);
+            final ExecutableFlowBase nextFlow = (ExecutableFlowBase) nextNode;
+            findAllStartingNodes(nextFlow, this.pipelineJobs);
           } else {
-            pipelineJobs.add(nextNode.getNestedId());
+            this.pipelineJobs.add(nextNode.getNestedId());
           }
         }
       }
     }
   }
 
-  private void findAllStartingNodes(ExecutableFlowBase flow,
-      Set<String> pipelineJobs) {
-    for (String startingNode : flow.getStartNodes()) {
-      ExecutableNode node = flow.getExecutableNode(startingNode);
+  private void findAllStartingNodes(final ExecutableFlowBase flow,
+      final Set<String> pipelineJobs) {
+    for (final String startingNode : flow.getStartNodes()) {
+      final ExecutableNode node = flow.getExecutableNode(startingNode);
       if (node instanceof ExecutableFlowBase) {
         findAllStartingNodes((ExecutableFlowBase) node, pipelineJobs);
       } else {
@@ -192,162 +228,171 @@ public class JobRunner extends EventHandler implements Runnable {
   /**
    * Returns a list of jobs that this JobRunner will wait upon to finish before
    * starting. It is only relevant if pipeline is turned on.
-   *
-   * @return
    */
   public Set<String> getPipelineWatchedJobs() {
-    return pipelineJobs;
+    return this.pipelineJobs;
   }
 
-  public void setDelayStart(long delayMS) {
-    delayStartMs = delayMS;
+  public long getDelayStart() {
+    return this.delayStartMs;
   }
 
-  public long getDelayStart() {
-    return delayStartMs;
+  public void setDelayStart(final long delayMS) {
+    this.delayStartMs = delayMS;
   }
 
   public ExecutableNode getNode() {
-    return node;
+    return this.node;
   }
 
   public String getLogFilePath() {
-    return logFile == null ? null : logFile.getPath();
+    return this.logFile == null ? null : this.logFile.getPath();
   }
 
   private void createLogger() {
     // Create logger
     synchronized (logCreatorLock) {
-      String loggerName =
+      final String loggerName =
           System.currentTimeMillis() + "." + this.executionId + "."
               + this.jobId;
-      logger = Logger.getLogger(loggerName);
+      this.logger = Logger.getLogger(loggerName);
 
       try {
         attachFileAppender(createFileAppender());
-      } catch (IOException e) {
-        removeAppender(jobAppender);
-        flowLogger.error("Could not open log file in " + workingDir
+      } catch (final IOException e) {
+        removeAppender(this.jobAppender);
+        this.flowLogger.error("Could not open log file in " + this.workingDir
             + " for job " + this.jobId, e);
       }
 
-      if (props.getBoolean(Constants.JobProperties.AZKABAN_JOB_LOGGING_KAFKA_ENABLE, false)) {
+      if (this.props.getBoolean(Constants.JobProperties.AZKABAN_JOB_LOGGING_KAFKA_ENABLE, false)) {
         // Only attempt appender construction if required properties are present
-        if (azkabanProps.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_BROKERLIST)
-            && azkabanProps.containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_TOPIC)) {
+        if (this.azkabanProps
+            .containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_BROKERLIST)
+            && this.azkabanProps
+            .containsKey(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_TOPIC)) {
           try {
             attachKafkaAppender(createKafkaAppender());
-          } catch (Exception e) {
-            removeAppender(kafkaAppender);
-            flowLogger.error("Failed to create Kafka appender for job " + this.jobId, e);
+          } catch (final Exception e) {
+            removeAppender(this.kafkaAppender);
+            this.flowLogger.error("Failed to create Kafka appender for job " + this.jobId, e);
           }
         } else {
-          flowLogger.info("Kafka appender not created as brokerlist or topic not provided by executor server");
+          this.flowLogger.info(
+              "Kafka appender not created as brokerlist or topic not provided by executor server");
         }
       }
     }
 
-    String externalViewer = ExternalLinkUtils.getExternalLogViewer(azkabanProps, this.jobId, props);
+    final String externalViewer = ExternalLinkUtils
+        .getExternalLogViewer(this.azkabanProps, this.jobId,
+            this.props);
     if (!externalViewer.isEmpty()) {
-      logger.info("See logs at: " + externalViewer);
+      this.logger.info("See logs at: " + externalViewer);
     }
   }
 
-  private void attachFileAppender(FileAppender appender) {
+  private void attachFileAppender(final FileAppender appender) {
     // If present, remove the existing file appender
-    assert(jobAppender == null);
+    assert (this.jobAppender == null);
 
-    jobAppender = appender;
-    logger.addAppender(jobAppender);
-    logger.setAdditivity(false);
-    flowLogger.info("Attached file appender for job " + this.jobId);
+    this.jobAppender = appender;
+    this.logger.addAppender(this.jobAppender);
+    this.logger.setAdditivity(false);
+    this.flowLogger.info("Attached file appender for job " + this.jobId);
   }
 
   private FileAppender createFileAppender() throws IOException {
     // Set up log files
-    String logName = createLogFileName(node);
-    logFile = new File(workingDir, logName);
-    String absolutePath = logFile.getAbsolutePath();
+    final String logName = createLogFileName(this.node);
+    this.logFile = new File(this.workingDir, logName);
+    final String absolutePath = this.logFile.getAbsolutePath();
 
     // Attempt to create FileAppender
-    RollingFileAppender fileAppender =
-        new RollingFileAppender(loggerLayout, absolutePath, true);
-    fileAppender.setMaxBackupIndex(jobLogBackupIndex);
-    fileAppender.setMaxFileSize(jobLogChunkSize);
+    final RollingFileAppender fileAppender =
+        new RollingFileAppender(this.loggerLayout, absolutePath, true);
+    fileAppender.setMaxBackupIndex(this.jobLogBackupIndex);
+    fileAppender.setMaxFileSize(this.jobLogChunkSize);
 
-    flowLogger.info("Created file appender for job " + this.jobId);
+    this.flowLogger.info("Created file appender for job " + this.jobId);
     return fileAppender;
   }
 
   private void createAttachmentFile() {
-    String fileName = createAttachmentFileName(node);
-    File file = new File(workingDir, fileName);
-    attachmentFileName = file.getAbsolutePath();
+    final String fileName = createAttachmentFileName(this.node);
+    final File file = new File(this.workingDir, fileName);
+    this.attachmentFileName = file.getAbsolutePath();
   }
 
-  private void attachKafkaAppender(KafkaLog4jAppender appender) {
+  private void attachKafkaAppender(final KafkaLog4jAppender appender) {
     // This should only be called once
-    assert(!kafkaAppender.isPresent());
+    assert (!this.kafkaAppender.isPresent());
 
-    kafkaAppender = Optional.of(appender);
-    logger.addAppender(kafkaAppender.get());
-    logger.setAdditivity(false);
-    flowLogger.info("Attached new Kafka appender for job " + this.jobId);
+    this.kafkaAppender = Optional.of(appender);
+    this.logger.addAppender(this.kafkaAppender.get());
+    this.logger.setAdditivity(false);
+    this.flowLogger.info("Attached new Kafka appender for job " + this.jobId);
   }
 
   private KafkaLog4jAppender createKafkaAppender() throws UndefinedPropertyException {
-    KafkaLog4jAppender kafkaProducer = new KafkaLog4jAppender();
+    final KafkaLog4jAppender kafkaProducer = new KafkaLog4jAppender();
     kafkaProducer.setSyncSend(false);
-    kafkaProducer.setBrokerList(azkabanProps.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_BROKERLIST));
-    kafkaProducer.setTopic(azkabanProps.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_TOPIC));
+    kafkaProducer.setBrokerList(this.azkabanProps
+        .getString(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_BROKERLIST));
+    kafkaProducer.setTopic(
+        this.azkabanProps
+            .getString(Constants.ConfigurationKeys.AZKABAN_SERVER_LOGGING_KAFKA_TOPIC));
 
-    JSONObject layout = new JSONObject();
+    final JSONObject layout = new JSONObject();
     layout.put("category", "%c{1}");
     layout.put("level", "%p");
     layout.put("message", "%m");
-    layout.put("projectname", props.getString(Constants.FlowProperties.AZKABAN_FLOW_PROJECT_NAME));
-    layout.put("flowid", props.getString(Constants.FlowProperties.AZKABAN_FLOW_FLOW_ID));
+    layout.put("projectname",
+        this.props.getString(Constants.FlowProperties.AZKABAN_FLOW_PROJECT_NAME));
+    layout.put("flowid", this.props.getString(Constants.FlowProperties.AZKABAN_FLOW_FLOW_ID));
     layout.put("jobid", this.jobId);
-    layout.put("submituser", props.getString(Constants.FlowProperties.AZKABAN_FLOW_SUBMIT_USER));
-    layout.put("execid", props.getString(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID));
-    layout.put("projectversion", props.getString(Constants.FlowProperties.AZKABAN_FLOW_PROJECT_VERSION));
+    layout
+        .put("submituser", this.props.getString(Constants.FlowProperties.AZKABAN_FLOW_SUBMIT_USER));
+    layout.put("execid", this.props.getString(Constants.FlowProperties.AZKABAN_FLOW_EXEC_ID));
+    layout.put("projectversion",
+        this.props.getString(Constants.FlowProperties.AZKABAN_FLOW_PROJECT_VERSION));
     layout.put("logsource", "userJob");
 
     kafkaProducer.setLayout(new PatternLayoutEscaped(layout.toString()));
     kafkaProducer.activateOptions();
 
-    flowLogger.info("Created kafka appender for " + this.jobId);
+    this.flowLogger.info("Created kafka appender for " + this.jobId);
     return kafkaProducer;
   }
 
-  private void removeAppender(Optional<Appender> appender) {
+  private void removeAppender(final Optional<Appender> appender) {
     if (appender.isPresent()) {
       removeAppender(appender.get());
     }
   }
 
-  private void removeAppender(Appender appender) {
+  private void removeAppender(final Appender appender) {
     if (appender != null) {
-      logger.removeAppender(appender);
+      this.logger.removeAppender(appender);
       appender.close();
     }
   }
 
   private void closeLogger() {
-    if (jobAppender != null) {
-      removeAppender(jobAppender);
+    if (this.jobAppender != null) {
+      removeAppender(this.jobAppender);
     }
-    if (kafkaAppender.isPresent()) {
-      removeAppender(kafkaAppender);
+    if (this.kafkaAppender.isPresent()) {
+      removeAppender(this.kafkaAppender);
     }
   }
 
   private void writeStatus() {
     try {
-      node.setUpdateTime(System.currentTimeMillis());
-      loader.updateExecutableNode(node);
-    } catch (ExecutorManagerException e) {
-      flowLogger.error("Could not update job properties in db for "
+      this.node.setUpdateTime(System.currentTimeMillis());
+      this.loader.updateExecutableNode(this.node);
+    } catch (final ExecutorManagerException e) {
+      this.flowLogger.error("Could not update job properties in db for "
           + this.jobId, e);
     }
   }
@@ -355,13 +400,11 @@ public class JobRunner extends EventHandler implements Runnable {
   /**
    * Used to handle non-ready and special status's (i.e. KILLED). Returns true
    * if they handled anything.
-   *
-   * @return
    */
   private boolean handleNonReadyStatus() {
-    Status nodeStatus = node.getStatus();
+    Status nodeStatus = this.node.getStatus();
     boolean quickFinish = false;
-    long time = System.currentTimeMillis();
+    final long time = System.currentTimeMillis();
 
     if (Status.isStatusFinished(nodeStatus)) {
       quickFinish = true;
@@ -374,10 +417,13 @@ public class JobRunner extends EventHandler implements Runnable {
     }
 
     if (quickFinish) {
-      node.setStartTime(time);
-      fireEvent(Event.create(this, Type.JOB_STARTED, new EventData(nodeStatus, node.getNestedId())));
-      node.setEndTime(time);
-      fireEvent(Event.create(this, Type.JOB_FINISHED, new EventData(nodeStatus, node.getNestedId())));
+      this.node.setStartTime(time);
+      fireEvent(
+          Event.create(this, Type.JOB_STARTED, new EventData(nodeStatus, this.node.getNestedId())));
+      this.node.setEndTime(time);
+      fireEvent(
+          Event
+              .create(this, Type.JOB_FINISHED, new EventData(nodeStatus, this.node.getNestedId())));
       return true;
     }
 
@@ -393,37 +439,37 @@ public class JobRunner extends EventHandler implements Runnable {
     }
 
     // For pipelining of jobs. Will watch other jobs.
-    if (!pipelineJobs.isEmpty()) {
+    if (!this.pipelineJobs.isEmpty()) {
       String blockedList = "";
-      ArrayList<BlockingStatus> blockingStatus =
-          new ArrayList<BlockingStatus>();
-      for (String waitingJobId : pipelineJobs) {
-        Status status = watcher.peekStatus(waitingJobId);
+      final ArrayList<BlockingStatus> blockingStatus =
+          new ArrayList<>();
+      for (final String waitingJobId : this.pipelineJobs) {
+        final Status status = this.watcher.peekStatus(waitingJobId);
         if (status != null && !Status.isStatusFinished(status)) {
-          BlockingStatus block = watcher.getBlockingStatus(waitingJobId);
+          final BlockingStatus block = this.watcher.getBlockingStatus(waitingJobId);
           blockingStatus.add(block);
           blockedList += waitingJobId + ",";
         }
       }
       if (!blockingStatus.isEmpty()) {
-        logger.info("Pipeline job " + this.jobId + " waiting on " + blockedList
-            + " in execution " + watcher.getExecId());
+        this.logger.info("Pipeline job " + this.jobId + " waiting on " + blockedList
+            + " in execution " + this.watcher.getExecId());
 
-        for (BlockingStatus bStatus : blockingStatus) {
-          logger.info("Waiting on pipelined job " + bStatus.getJobId());
-          currentBlockStatus = bStatus;
+        for (final BlockingStatus bStatus : blockingStatus) {
+          this.logger.info("Waiting on pipelined job " + bStatus.getJobId());
+          this.currentBlockStatus = bStatus;
           bStatus.blockOnFinishedStatus();
           if (this.isKilled()) {
-            logger.info("Job was killed while waiting on pipeline. Quiting.");
+            this.logger.info("Job was killed while waiting on pipeline. Quiting.");
             return true;
           } else {
-            logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
+            this.logger.info("Pipelined job " + bStatus.getJobId() + " finished.");
           }
         }
       }
     }
 
-    currentBlockStatus = null;
+    this.currentBlockStatus = null;
     return false;
   }
 
@@ -432,24 +478,24 @@ public class JobRunner extends EventHandler implements Runnable {
       return true;
     }
 
-    long currentTime = System.currentTimeMillis();
-    if (delayStartMs > 0) {
-      logger.info("Delaying start of execution for " + delayStartMs
+    final long currentTime = System.currentTimeMillis();
+    if (this.delayStartMs > 0) {
+      this.logger.info("Delaying start of execution for " + this.delayStartMs
           + " milliseconds.");
       synchronized (this) {
         try {
-          this.wait(delayStartMs);
-          logger.info("Execution has been delayed for " + delayStartMs
+          this.wait(this.delayStartMs);
+          this.logger.info("Execution has been delayed for " + this.delayStartMs
               + " ms. Continuing with execution.");
-        } catch (InterruptedException e) {
-          logger.error("Job " + this.jobId + " was to be delayed for "
-              + delayStartMs + ". Interrupted after "
+        } catch (final InterruptedException e) {
+          this.logger.error("Job " + this.jobId + " was to be delayed for "
+              + this.delayStartMs + ". Interrupted after "
               + (System.currentTimeMillis() - currentTime));
         }
       }
 
       if (this.isKilled()) {
-        logger.info("Job was killed while in delay. Quiting.");
+        this.logger.info("Job was killed while in delay. Quiting.");
         return true;
       }
     }
@@ -457,46 +503,46 @@ public class JobRunner extends EventHandler implements Runnable {
     return false;
   }
 
-  private void finalizeLogFile(int attemptNo) {
+  private void finalizeLogFile(final int attemptNo) {
     closeLogger();
-    if (logFile == null) {
-      flowLogger.info("Log file for job " + this.jobId + " is null");
+    if (this.logFile == null) {
+      this.flowLogger.info("Log file for job " + this.jobId + " is null");
       return;
     }
 
     try {
-      File[] files = logFile.getParentFile().listFiles(new FilenameFilter() {
+      final File[] files = this.logFile.getParentFile().listFiles(new FilenameFilter() {
         @Override
-        public boolean accept(File dir, String name) {
-          return name.startsWith(logFile.getName());
+        public boolean accept(final File dir, final String name) {
+          return name.startsWith(JobRunner.this.logFile.getName());
         }
       });
       Arrays.sort(files, Collections.reverseOrder());
 
-      loader.uploadLogFile(executionId, this.node.getNestedId(), attemptNo,
+      this.loader.uploadLogFile(this.executionId, this.node.getNestedId(), attemptNo,
           files);
-    } catch (ExecutorManagerException e) {
-      flowLogger.error(
+    } catch (final ExecutorManagerException e) {
+      this.flowLogger.error(
           "Error writing out logs for job " + this.node.getNestedId(), e);
     }
   }
 
   private void finalizeAttachmentFile() {
-    if (attachmentFileName == null) {
-      flowLogger.info("Attachment file for job " + this.jobId + " is null");
+    if (this.attachmentFileName == null) {
+      this.flowLogger.info("Attachment file for job " + this.jobId + " is null");
       return;
     }
 
     try {
-      File file = new File(attachmentFileName);
+      final File file = new File(this.attachmentFileName);
       if (!file.exists()) {
-        flowLogger.info("No attachment file for job " + this.jobId
+        this.flowLogger.info("No attachment file for job " + this.jobId
             + " written.");
         return;
       }
-      loader.uploadAttachmentFile(node, file);
-    } catch (ExecutorManagerException e) {
-      flowLogger.error(
+      this.loader.uploadAttachmentFile(this.node, file);
+    } catch (final ExecutorManagerException e) {
+      this.flowLogger.error(
           "Error writing out attachment for job " + this.node.getNestedId(), e);
     }
   }
@@ -507,7 +553,7 @@ public class JobRunner extends EventHandler implements Runnable {
   @Override
   public void run() {
     Thread.currentThread().setName(
-        "JobRunner-" + this.jobId + "-" + executionId);
+        "JobRunner-" + this.jobId + "-" + this.executionId);
 
     // If the job is cancelled, disabled, killed. No log is created in this case
     if (handleNonReadyStatus()) {
@@ -525,29 +571,29 @@ public class JobRunner extends EventHandler implements Runnable {
     errorFound |= blockOnPipeLine();
 
     // Start the node.
-    node.setStartTime(System.currentTimeMillis());
-    Status finalStatus = node.getStatus();
+    this.node.setStartTime(System.currentTimeMillis());
+    Status finalStatus = this.node.getStatus();
     if (!errorFound && !isKilled()) {
-      fireEvent(Event.create(this, Type.JOB_STARTED, new EventData(node)));
+      fireEvent(Event.create(this, Type.JOB_STARTED, new EventData(this.node)));
       try {
-        loader.uploadExecutableNode(node, props);
-      } catch (ExecutorManagerException e1) {
-        logger.error("Error writing initial node properties");
+        this.loader.uploadExecutableNode(this.node, this.props);
+      } catch (final ExecutorManagerException e1) {
+        this.logger.error("Error writing initial node properties");
       }
 
-      Status prepareStatus = prepareJob();
+      final Status prepareStatus = prepareJob();
       if (prepareStatus != null) {
         // Writes status to the db
         writeStatus();
         fireEvent(Event.create(this, Type.JOB_STATUS_CHANGED,
-            new EventData(prepareStatus, node.getNestedId())));
+            new EventData(prepareStatus, this.node.getNestedId())));
         finalStatus = runJob();
       } else {
         finalStatus = changeStatus(Status.FAILED);
         logError("Job run failed preparing the job.");
       }
     }
-    node.setEndTime(System.currentTimeMillis());
+    this.node.setEndTime(System.currentTimeMillis());
 
     if (isKilled()) {
       // even if it's killed, there is a chance that the job failed is marked as
@@ -558,12 +604,12 @@ public class JobRunner extends EventHandler implements Runnable {
       finalStatus = changeStatus(Status.KILLED);
     }
 
-    int attemptNo = node.getAttempt();
+    final int attemptNo = this.node.getAttempt();
     logInfo("Finishing job " + this.jobId + " attempt: " + attemptNo + " at "
-        + node.getEndTime() + " with status " + node.getStatus());
+        + this.node.getEndTime() + " with status " + this.node.getStatus());
 
     fireEvent(Event.create(this, Type.JOB_FINISHED,
-        new EventData(finalStatus, node.getNestedId())), false);
+        new EventData(finalStatus, this.node.getNestedId())), false);
     finalizeLogFile(attemptNo);
     finalizeAttachmentFile();
     writeStatus();
@@ -571,59 +617,59 @@ public class JobRunner extends EventHandler implements Runnable {
 
   private Status prepareJob() throws RuntimeException {
     // Check pre conditions
-    if (props == null || this.isKilled()) {
+    if (this.props == null || this.isKilled()) {
       logError("Failing job. The job properties don't exist");
       return null;
     }
 
-    Status finalStatus;
-    synchronized (syncObject) {
-      if (node.getStatus() == Status.FAILED || this.isKilled()) {
+    final Status finalStatus;
+    synchronized (this.syncObject) {
+      if (this.node.getStatus() == Status.FAILED || this.isKilled()) {
         return null;
       }
 
-      if (node.getAttempt() > 0) {
-        logInfo("Starting job " + this.jobId + " attempt " + node.getAttempt()
-            + " at " + node.getStartTime());
+      if (this.node.getAttempt() > 0) {
+        logInfo("Starting job " + this.jobId + " attempt " + this.node.getAttempt()
+            + " at " + this.node.getStartTime());
       } else {
-        logInfo("Starting job " + this.jobId + " at " + node.getStartTime());
+        logInfo("Starting job " + this.jobId + " at " + this.node.getStartTime());
       }
 
       // If it's an embedded flow, we'll add the nested flow info to the job
       // conf
-      if (node.getExecutableFlow() != node.getParentFlow()) {
-        String subFlow = node.getPrintableId(":");
-        props.put(CommonJobProperties.NESTED_FLOW_PATH, subFlow);
+      if (this.node.getExecutableFlow() != this.node.getParentFlow()) {
+        final String subFlow = this.node.getPrintableId(":");
+        this.props.put(CommonJobProperties.NESTED_FLOW_PATH, subFlow);
       }
 
       insertJobMetadata();
       insertJVMAargs();
 
-      props.put(CommonJobProperties.JOB_ID, this.jobId);
-      props.put(CommonJobProperties.JOB_ATTEMPT, node.getAttempt());
-      props.put(CommonJobProperties.JOB_METADATA_FILE,
-          createMetaDataFileName(node));
-      props.put(CommonJobProperties.JOB_ATTACHMENT_FILE, attachmentFileName);
+      this.props.put(CommonJobProperties.JOB_ID, this.jobId);
+      this.props.put(CommonJobProperties.JOB_ATTEMPT, this.node.getAttempt());
+      this.props.put(CommonJobProperties.JOB_METADATA_FILE,
+          createMetaDataFileName(this.node));
+      this.props.put(CommonJobProperties.JOB_ATTACHMENT_FILE, this.attachmentFileName);
       finalStatus = changeStatus(Status.RUNNING);
 
       // Ability to specify working directory
-      if (!props.containsKey(AbstractProcessJob.WORKING_DIR)) {
-        props.put(AbstractProcessJob.WORKING_DIR, workingDir.getAbsolutePath());
+      if (!this.props.containsKey(AbstractProcessJob.WORKING_DIR)) {
+        this.props.put(AbstractProcessJob.WORKING_DIR, this.workingDir.getAbsolutePath());
       }
 
-      if (props.containsKey("user.to.proxy")) {
-        String jobProxyUser = props.getString("user.to.proxy");
-        if (proxyUsers != null && !proxyUsers.contains(jobProxyUser)) {
-          logger.error("User " + jobProxyUser
+      if (this.props.containsKey("user.to.proxy")) {
+        final String jobProxyUser = this.props.getString("user.to.proxy");
+        if (this.proxyUsers != null && !this.proxyUsers.contains(jobProxyUser)) {
+          this.logger.error("User " + jobProxyUser
               + " has no permission to execute this job " + this.jobId + "!");
           return null;
         }
       }
 
       try {
-        job = jobtypeManager.buildJobExecutor(this.jobId, props, logger);
-      } catch (JobTypeManagerException e) {
-        logger.error("Failed to build job type", e);
+        this.job = this.jobtypeManager.buildJobExecutor(this.jobId, this.props, this.logger);
+      } catch (final JobTypeManagerException e) {
+        this.logger.error("Failed to build job type", e);
         return null;
       }
     }
@@ -636,19 +682,19 @@ public class JobRunner extends EventHandler implements Runnable {
    * flow, execution id and job
    */
   private void insertJVMAargs() {
-    String flowName = node.getParentFlow().getFlowId();
-    String jobId = node.getId();
+    final String flowName = this.node.getParentFlow().getFlowId();
+    final String jobId = this.node.getId();
 
     String jobJVMArgs =
         String.format(
             "-Dazkaban.flowid=%s -Dazkaban.execid=%s -Dazkaban.jobid=%s",
-            flowName, executionId, jobId);
+            flowName, this.executionId, jobId);
 
-    String previousJVMArgs = props.get(JavaProcessJob.JVM_PARAMS);
+    final String previousJVMArgs = this.props.get(JavaProcessJob.JVM_PARAMS);
     jobJVMArgs += (previousJVMArgs == null) ? "" : " " + previousJVMArgs;
 
-    logger.info("job JVM args: " + jobJVMArgs);
-    props.put(JavaProcessJob.JVM_PARAMS, jobJVMArgs);
+    this.logger.info("job JVM args: " + jobJVMArgs);
+    this.props.put(JavaProcessJob.JVM_PARAMS, jobJVMArgs);
   }
 
   /**
@@ -656,44 +702,44 @@ public class JobRunner extends EventHandler implements Runnable {
    * know what executions initiated their execution.
    */
   private void insertJobMetadata() {
-    String baseURL = azkabanProps.get(AZKABAN_WEBSERVER_URL);
+    final String baseURL = this.azkabanProps.get(AZKABAN_WEBSERVER_URL);
     if (baseURL != null) {
-      String flowName = node.getParentFlow().getFlowId();
-      String projectName = node.getParentFlow().getProjectName();
-
-      props.put(CommonJobProperties.AZKABAN_URL, baseURL);
-      props.put(CommonJobProperties.EXECUTION_LINK,
-          String.format("%s/executor?execid=%d", baseURL, executionId));
-      props.put(CommonJobProperties.JOBEXEC_LINK, String.format(
-          "%s/executor?execid=%d&job=%s", baseURL, executionId, jobId));
-      props.put(CommonJobProperties.ATTEMPT_LINK, String.format(
-          "%s/executor?execid=%d&job=%s&attempt=%d", baseURL, executionId,
-          jobId, node.getAttempt()));
-      props.put(CommonJobProperties.WORKFLOW_LINK, String.format(
+      final String flowName = this.node.getParentFlow().getFlowId();
+      final String projectName = this.node.getParentFlow().getProjectName();
+
+      this.props.put(CommonJobProperties.AZKABAN_URL, baseURL);
+      this.props.put(CommonJobProperties.EXECUTION_LINK,
+          String.format("%s/executor?execid=%d", baseURL, this.executionId));
+      this.props.put(CommonJobProperties.JOBEXEC_LINK, String.format(
+          "%s/executor?execid=%d&job=%s", baseURL, this.executionId, this.jobId));
+      this.props.put(CommonJobProperties.ATTEMPT_LINK, String.format(
+          "%s/executor?execid=%d&job=%s&attempt=%d", baseURL, this.executionId,
+          this.jobId, this.node.getAttempt()));
+      this.props.put(CommonJobProperties.WORKFLOW_LINK, String.format(
           "%s/manager?project=%s&flow=%s", baseURL, projectName, flowName));
-      props.put(CommonJobProperties.JOB_LINK, String.format(
+      this.props.put(CommonJobProperties.JOB_LINK, String.format(
           "%s/manager?project=%s&flow=%s&job=%s", baseURL, projectName,
-          flowName, jobId));
+          flowName, this.jobId));
     } else {
-      if (logger != null) {
-        logger.info(AZKABAN_WEBSERVER_URL + " property was not set");
+      if (this.logger != null) {
+        this.logger.info(AZKABAN_WEBSERVER_URL + " property was not set");
       }
     }
     // out nodes
-    props.put(CommonJobProperties.OUT_NODES,
-        StringUtils.join2(node.getOutNodes(), ","));
+    this.props.put(CommonJobProperties.OUT_NODES,
+        StringUtils.join2(this.node.getOutNodes(), ","));
 
     // in nodes
-    props.put(CommonJobProperties.IN_NODES,
-        StringUtils.join2(node.getInNodes(), ","));
+    this.props.put(CommonJobProperties.IN_NODES,
+        StringUtils.join2(this.node.getInNodes(), ","));
   }
 
   private Status runJob() {
-    Status finalStatus = node.getStatus();
+    Status finalStatus = this.node.getStatus();
     try {
-      job.run();
-    } catch (Throwable e) {
-      if (props.getBoolean("job.succeed.on.failure", false)) {
+      this.job.run();
+    } catch (final Throwable e) {
+      if (this.props.getBoolean("job.succeed.on.failure", false)) {
         finalStatus = changeStatus(Status.FAILED_SUCCEEDED);
         logError("Job run failed, but will treat it like success.");
         logError(e.getMessage() + " cause: " + e.getCause(), e);
@@ -704,8 +750,8 @@ public class JobRunner extends EventHandler implements Runnable {
       }
     }
 
-    if (job != null) {
-      node.setOutputProps(job.getJobGeneratedProperties());
+    if (this.job != null) {
+      this.node.setOutputProps(this.job.getJobGeneratedProperties());
     }
 
     // If the job is still running, set the status to Success.
@@ -715,43 +761,43 @@ public class JobRunner extends EventHandler implements Runnable {
     return finalStatus;
   }
 
-  private Status changeStatus(Status status) {
+  private Status changeStatus(final Status status) {
     changeStatus(status, System.currentTimeMillis());
     return status;
   }
 
-  private Status changeStatus(Status status, long time) {
-    node.setStatus(status);
-    node.setUpdateTime(time);
+  private Status changeStatus(final Status status, final long time) {
+    this.node.setStatus(status);
+    this.node.setUpdateTime(time);
     return status;
   }
 
-  private void fireEvent(Event event) {
+  private void fireEvent(final Event event) {
     fireEvent(event, true);
   }
 
-  private void fireEvent(Event event, boolean updateTime) {
+  private void fireEvent(final Event event, final boolean updateTime) {
     if (updateTime) {
-      node.setUpdateTime(System.currentTimeMillis());
+      this.node.setUpdateTime(System.currentTimeMillis());
     }
     this.fireEventListeners(event);
   }
 
   public void kill() {
-    synchronized (syncObject) {
-      if (Status.isStatusFinished(node.getStatus())) {
+    synchronized (this.syncObject) {
+      if (Status.isStatusFinished(this.node.getStatus())) {
         return;
       }
       logError("Kill has been called.");
       this.killed = true;
 
-      BlockingStatus status = currentBlockStatus;
+      final BlockingStatus status = this.currentBlockStatus;
       if (status != null) {
         status.unblock();
       }
 
       // Cancel code here
-      if (job == null) {
+      if (this.job == null) {
         logError("Job hasn't started yet.");
         // Just in case we're waiting on the delay
         synchronized (this) {
@@ -761,10 +807,11 @@ public class JobRunner extends EventHandler implements Runnable {
       }
 
       try {
-        job.cancel();
-      } catch (Exception e) {
+        this.job.cancel();
+      } catch (final Exception e) {
         logError(e.getMessage());
-        logError("Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
+        logError(
+            "Failed trying to cancel job. Maybe it hasn't started running yet or just finished.");
       }
 
       this.changeStatus(Status.KILLED);
@@ -772,84 +819,36 @@ public class JobRunner extends EventHandler implements Runnable {
   }
 
   public boolean isKilled() {
-    return killed;
+    return this.killed;
   }
 
   public Status getStatus() {
-    return node.getStatus();
+    return this.node.getStatus();
   }
 
-  private void logError(String message) {
-    if (logger != null) {
-      logger.error(message);
+  private void logError(final String message) {
+    if (this.logger != null) {
+      this.logger.error(message);
     }
   }
 
-  private void logError(String message, Throwable t) {
-    if (logger != null) {
-      logger.error(message, t);
+  private void logError(final String message, final Throwable t) {
+    if (this.logger != null) {
+      this.logger.error(message, t);
     }
   }
 
-  private void logInfo(String message) {
-    if (logger != null) {
-      logger.info(message);
+  private void logInfo(final String message) {
+    if (this.logger != null) {
+      this.logger.info(message);
     }
   }
 
   public File getLogFile() {
-    return logFile;
+    return this.logFile;
   }
 
   public Logger getLogger() {
-    return logger;
-  }
-
-  public static String createLogFileName(ExecutableNode node, int attempt) {
-    int executionId = node.getExecutableFlow().getExecutionId();
-    String jobId = node.getId();
-    if (node.getExecutableFlow() != node.getParentFlow()) {
-      // Posix safe file delimiter
-      jobId = node.getPrintableId("._.");
-    }
-    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
-        + ".log" : "_job." + executionId + "." + jobId + ".log";
-  }
-
-  public static String createLogFileName(ExecutableNode node) {
-    return JobRunner.createLogFileName(node, node.getAttempt());
-  }
-
-  public static String createMetaDataFileName(ExecutableNode node, int attempt) {
-    int executionId = node.getExecutableFlow().getExecutionId();
-    String jobId = node.getId();
-    if (node.getExecutableFlow() != node.getParentFlow()) {
-      // Posix safe file delimiter
-      jobId = node.getPrintableId("._.");
-    }
-
-    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
-        + ".meta" : "_job." + executionId + "." + jobId + ".meta";
-  }
-
-  public static String createMetaDataFileName(ExecutableNode node) {
-    return JobRunner.createMetaDataFileName(node, node.getAttempt());
-  }
-
-  public static String createAttachmentFileName(ExecutableNode node) {
-
-    return JobRunner.createAttachmentFileName(node, node.getAttempt());
-  }
-
-  public static String createAttachmentFileName(ExecutableNode node, int attempt) {
-    int executionId = node.getExecutableFlow().getExecutionId();
-    String jobId = node.getId();
-    if (node.getExecutableFlow() != node.getParentFlow()) {
-      // Posix safe file delimiter
-      jobId = node.getPrintableId("._.");
-    }
-
-    return attempt > 0 ? "_job." + executionId + "." + attempt + "." + jobId
-        + ".attach" : "_job." + executionId + "." + jobId + ".attach";
+    return this.logger;
   }
 }
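
As context for the insertJVMAargs() hunk above: the runner composes a
"-Dazkaban.flowid=... -Dazkaban.execid=... -Dazkaban.jobid=..." string and prepends it to
whatever is already stored under JVM_PARAMS. A minimal standalone sketch of that composition
(the flow, execution and job values below are invented for illustration, not taken from Azkaban):

    // Sketch only: reproduces the string building shown in insertJVMAargs().
    public class JvmArgsSketch {
      public static void main(String[] args) {
        String flowName = "daily_etl";       // hypothetical flow id
        int executionId = 12345;             // hypothetical execution id
        String jobId = "load_events";        // hypothetical job id
        String previousJvmArgs = "-Xmx2g";   // pre-existing JVM_PARAMS value, may be null

        String jobJvmArgs = String.format(
            "-Dazkaban.flowid=%s -Dazkaban.execid=%s -Dazkaban.jobid=%s",
            flowName, executionId, jobId);
        jobJvmArgs += (previousJvmArgs == null) ? "" : " " + previousJvmArgs;

        // -Dazkaban.flowid=daily_etl -Dazkaban.execid=12345 -Dazkaban.jobid=load_events -Xmx2g
        System.out.println(jobJvmArgs);
      }
    }
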
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedFlowMetric.java b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedFlowMetric.java
index b77c74f..ee5d491 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedFlowMetric.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedFlowMetric.java
@@ -28,26 +28,30 @@ import azkaban.metric.TimeBasedReportingMetric;
 /**
  * Metric to keep track of number of failed flows in between the tracking events
  */
-public class NumFailedFlowMetric extends TimeBasedReportingMetric<Integer> implements EventListener {
+public class NumFailedFlowMetric extends TimeBasedReportingMetric<Integer> implements
+    EventListener {
+
   public static final String NUM_FAILED_FLOW_METRIC_NAME = "NumFailedFlowMetric";
   private static final String NUM_FAILED_FLOW_METRIC_TYPE = "uint16";
 
-  public NumFailedFlowMetric(MetricReportManager manager, long interval) throws MetricException {
+  public NumFailedFlowMetric(final MetricReportManager manager, final long interval)
+      throws MetricException {
     super(NUM_FAILED_FLOW_METRIC_NAME, NUM_FAILED_FLOW_METRIC_TYPE, 0, manager, interval);
     logger.debug("Instantiated NumFailedJobMetric");
   }
 
   /**
-  * Listen for events to maintain correct value of number of failed flows
-  * {@inheritDoc}
-  * @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
-  */
+   * Listen for events to maintain correct value of number of failed flows
+   * {@inheritDoc}
+   *
+   * @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
+   */
   @Override
-  public synchronized void handleEvent(Event event) {
+  public synchronized void handleEvent(final Event event) {
     if (event.getType() == Type.FLOW_FINISHED) {
-      FlowRunner runner = (FlowRunner) event.getRunner();
+      final FlowRunner runner = (FlowRunner) event.getRunner();
       if (runner != null && runner.getExecutableFlow().getStatus().equals(Status.FAILED)) {
-        value = value + 1;
+        this.value = this.value + 1;
       }
     }
   }
@@ -59,7 +63,7 @@ public class NumFailedFlowMetric extends TimeBasedReportingMetric<Integer> imple
 
   @Override
   protected synchronized void postTrackingEventMethod() {
-    value = 0;
+    this.value = 0;
   }
 
 }
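
The hunk above keeps NumFailedFlowMetric's shape intact: handleEvent() increments value when a
FLOW_FINISHED event reports a FAILED flow, and postTrackingEventMethod() resets it after each
report. A sketch of that counter-between-reports pattern using plain Java types rather than
Azkaban's metric and event classes (class and method names here are hypothetical):

    import java.util.concurrent.atomic.AtomicInteger;

    // Count failures observed since the last report, then reset for the next window.
    class FailureCounterSketch {
      private final AtomicInteger failedSinceLastReport = new AtomicInteger();

      // analogous to handleEvent(): bump the counter when a failed flow is observed
      void onFlowFinished(boolean failed) {
        if (failed) {
          failedSinceLastReport.incrementAndGet();
        }
      }

      // analogous to postTrackingEventMethod(): hand back the count and start a new window
      int reportAndReset() {
        return failedSinceLastReport.getAndSet(0);
      }
    }
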
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedJobMetric.java b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedJobMetric.java
index 0a0e937..e2599b2 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedJobMetric.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumFailedJobMetric.java
@@ -18,9 +18,7 @@ package azkaban.execapp.metric;
 
 import azkaban.event.Event;
 import azkaban.event.Event.Type;
-import azkaban.event.EventData;
 import azkaban.event.EventListener;
-import azkaban.execapp.JobRunner;
 import azkaban.executor.Status;
 import azkaban.metric.MetricException;
 import azkaban.metric.MetricReportManager;
@@ -30,10 +28,12 @@ import azkaban.metric.TimeBasedReportingMetric;
  * Metric to keep track of number of failed jobs in between the tracking events
  */
 public class NumFailedJobMetric extends TimeBasedReportingMetric<Integer> implements EventListener {
+
   public static final String NUM_FAILED_JOB_METRIC_NAME = "NumFailedJobMetric";
   private static final String NUM_FAILED_JOB_METRIC_TYPE = "uint16";
 
-  public NumFailedJobMetric(MetricReportManager manager, long interval) throws MetricException {
+  public NumFailedJobMetric(final MetricReportManager manager, final long interval)
+      throws MetricException {
     super(NUM_FAILED_JOB_METRIC_NAME, NUM_FAILED_JOB_METRIC_TYPE, 0, manager, interval);
     logger.debug("Instantiated NumFailedJobMetric");
   }
@@ -41,12 +41,13 @@ public class NumFailedJobMetric extends TimeBasedReportingMetric<Integer> implem
   /**
    * Listen for events to maintain correct value of number of failed jobs
    * {@inheritDoc}
+   *
    * @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
    */
   @Override
-  public synchronized void handleEvent(Event event) {
+  public synchronized void handleEvent(final Event event) {
     if (event.getType() == Type.JOB_FINISHED && Status.FAILED.equals(event.getData().getStatus())) {
-      value = value + 1;
+      this.value = this.value + 1;
     }
   }
 
@@ -57,7 +58,7 @@ public class NumFailedJobMetric extends TimeBasedReportingMetric<Integer> implem
 
   @Override
   protected synchronized void postTrackingEventMethod() {
-    value = 0;
+    this.value = 0;
   }
 
 }
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumQueuedFlowMetric.java b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumQueuedFlowMetric.java
index c44917a..ccc447c 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumQueuedFlowMetric.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumQueuedFlowMetric.java
@@ -25,31 +25,34 @@ import azkaban.metric.TimeBasedReportingMetric;
  * Metric to keep track of number of queued flows in Azkaban exec server
  */
 public class NumQueuedFlowMetric extends TimeBasedReportingMetric<Integer> {
+
   public static final String NUM_QUEUED_FLOW_METRIC_NAME = "NumQueuedFlowMetric";
   private static final String NUM_QUEUED_FLOW_METRIC_TYPE = "uint16";
 
-  private FlowRunnerManager flowManager;
+  private final FlowRunnerManager flowManager;
 
   /**
    * @param flowRunnerManager Flow runner manager
    * @param manager metric report manager
    * @param interval reporting interval
-   * @throws MetricException
    */
-  public NumQueuedFlowMetric(FlowRunnerManager flowRunnerManager, MetricReportManager manager, long interval) throws MetricException {
+  public NumQueuedFlowMetric(final FlowRunnerManager flowRunnerManager,
+      final MetricReportManager manager,
+      final long interval) throws MetricException {
     super(NUM_QUEUED_FLOW_METRIC_NAME, NUM_QUEUED_FLOW_METRIC_TYPE, 0, manager, interval);
     logger.debug("Instantiated NumQueuedFlowMetric");
-    flowManager = flowRunnerManager;
+    this.flowManager = flowRunnerManager;
   }
 
   /**
    * Update value using flow manager
    * {@inheritDoc}
+   *
    * @see azkaban.metric.TimeBasedReportingMetric#preTrackingEventMethod()
    */
   @Override
   protected synchronized void preTrackingEventMethod() {
-    value = flowManager.getNumQueuedFlows();
+    this.value = this.flowManager.getNumQueuedFlows();
   }
 
   @Override
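
NumQueuedFlowMetric (and NumRunningFlowMetric below) work differently from the failure counters:
they are gauges whose preTrackingEventMethod() samples FlowRunnerManager right before each report
instead of listening for events. A sketch of that sampling shape; IntSupplier stands in for
FlowRunnerManager#getNumQueuedFlows() and the class name is hypothetical:

    import java.util.function.IntSupplier;

    // Gauge-style metric: refresh the value from a supplier just before it is reported.
    class QueueDepthGaugeSketch {
      private final IntSupplier queueDepth;
      private int value;

      QueueDepthGaugeSketch(IntSupplier queueDepth) {
        this.queueDepth = queueDepth;
      }

      // analogous to preTrackingEventMethod()
      void preReport() {
        this.value = queueDepth.getAsInt();
      }

      int currentValue() {
        return this.value;
      }
    }
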
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningFlowMetric.java b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningFlowMetric.java
index d7d09cb..4317abd 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningFlowMetric.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningFlowMetric.java
@@ -25,31 +25,34 @@ import azkaban.metric.TimeBasedReportingMetric;
  * Metric to keep track of number of running flows in Azkaban exec server
  */
 public class NumRunningFlowMetric extends TimeBasedReportingMetric<Integer> {
+
   public static final String NUM_RUNNING_FLOW_METRIC_NAME = "NumRunningFlowMetric";
   private static final String NUM_RUNNING_FLOW_METRIC_TYPE = "uint16";
 
-  private FlowRunnerManager flowManager;
+  private final FlowRunnerManager flowManager;
 
   /**
    * @param flowRunnerManager Flow runner manager
    * @param manager metric report manager
    * @param interval reporting interval
-   * @throws MetricException
    */
-  public NumRunningFlowMetric(FlowRunnerManager flowRunnerManager, MetricReportManager manager, long interval) throws MetricException {
+  public NumRunningFlowMetric(final FlowRunnerManager flowRunnerManager,
+      final MetricReportManager manager,
+      final long interval) throws MetricException {
     super(NUM_RUNNING_FLOW_METRIC_NAME, NUM_RUNNING_FLOW_METRIC_TYPE, 0, manager, interval);
     logger.debug("Instantiated NumRunningFlowMetric");
-    flowManager = flowRunnerManager;
+    this.flowManager = flowRunnerManager;
   }
 
   /**
    * Update value using flow manager
    * {@inheritDoc}
+   *
    * @see azkaban.metric.TimeBasedReportingMetric#preTrackingEventMethod()
    */
   @Override
   protected synchronized void preTrackingEventMethod() {
-    value = flowManager.getNumRunningFlows();
+    this.value = this.flowManager.getNumRunningFlows();
   }
 
   @Override
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningJobMetric.java b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningJobMetric.java
index 84ebfd4..f20d8d9 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningJobMetric.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/metric/NumRunningJobMetric.java
@@ -26,16 +26,18 @@ import azkaban.metric.TimeBasedReportingMetric;
 /**
  * Metric to keep track of number of running jobs in Azkaban exec server
  */
-public class NumRunningJobMetric extends TimeBasedReportingMetric<Integer> implements EventListener {
+public class NumRunningJobMetric extends TimeBasedReportingMetric<Integer> implements
+    EventListener {
+
   public static final String NUM_RUNNING_JOB_METRIC_NAME = "NumRunningJobMetric";
   private static final String NUM_RUNNING_JOB_METRIC_TYPE = "uint16";
 
   /**
    * @param manager metric manager
    * @param interval reporting interval
-   * @throws MetricException
    */
-  public NumRunningJobMetric(MetricReportManager manager, long interval) throws MetricException {
+  public NumRunningJobMetric(final MetricReportManager manager, final long interval)
+      throws MetricException {
     super(NUM_RUNNING_JOB_METRIC_NAME, NUM_RUNNING_JOB_METRIC_TYPE, 0, manager, interval);
     logger.debug("Instantiated NumRunningJobMetric");
   }
@@ -43,14 +45,15 @@ public class NumRunningJobMetric extends TimeBasedReportingMetric<Integer> imple
   /**
    * Listen for events to maintain correct value of number of running jobs
    * {@inheritDoc}
+   *
    * @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
    */
   @Override
-  public synchronized void handleEvent(Event event) {
+  public synchronized void handleEvent(final Event event) {
     if (event.getType() == Type.JOB_STARTED) {
-      value = value + 1;
+      this.value = this.value + 1;
     } else if (event.getType() == Type.JOB_FINISHED) {
-      value = value - 1;
+      this.value = this.value - 1;
     }
   }
 
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/ProjectVersion.java b/azkaban-exec-server/src/main/java/azkaban/execapp/ProjectVersion.java
index a52d6e9..e5c1407 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/ProjectVersion.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/ProjectVersion.java
@@ -16,20 +16,19 @@
 
 package azkaban.execapp;
 
-import com.google.common.base.Preconditions;
-import java.io.File;
-import org.apache.log4j.Logger;
+import static com.google.common.base.Preconditions.checkArgument;
 
-import static com.google.common.base.Preconditions.*;
+import java.io.File;
 
 
 public class ProjectVersion implements Comparable<ProjectVersion> {
+
   private final int projectId;
   private final int version;
 
   private File installedDir;
 
-  public ProjectVersion(int projectId, int version) {
+  public ProjectVersion(final int projectId, final int version) {
     checkArgument(projectId > 0);
     checkArgument(version > 0);
 
@@ -37,39 +36,41 @@ public class ProjectVersion implements Comparable<ProjectVersion> {
     this.version = version;
   }
 
-  public ProjectVersion(int projectId, int version, File installedDir) {
+  public ProjectVersion(final int projectId, final int version, final File installedDir) {
     this(projectId, version);
     this.installedDir = installedDir;
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
   public File getInstalledDir() {
-    return installedDir;
+    return this.installedDir;
   }
 
-  public void setInstalledDir(File installedDir) {
+  public void setInstalledDir(final File installedDir) {
     this.installedDir = installedDir;
   }
 
   @Override
-  public int compareTo(ProjectVersion o) {
-    if (projectId == o.projectId) {
-      return version - o.version;
+  public int compareTo(final ProjectVersion o) {
+    if (this.projectId == o.projectId) {
+      return this.version - o.version;
     }
 
-    return projectId - o.projectId;
+    return this.projectId - o.projectId;
   }
 
   @Override
   public String toString() {
-    return "ProjectVersion{" + "projectId=" + projectId + ", version=" + version + ", installedDir=" + installedDir
+    return "ProjectVersion{" + "projectId=" + this.projectId + ", version=" + this.version
+        + ", installedDir="
+        + this.installedDir
         + '}';
   }
 }
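
ProjectVersion.compareTo orders by projectId and then by version using integer subtraction; since
the constructor checks that both fields are positive, the subtraction cannot overflow here. The
same ordering can also be expressed with a Comparator, shown below purely as an alternative
sketch (it is not part of the change above):

    import java.util.Comparator;

    import azkaban.execapp.ProjectVersion;

    // Same projectId-then-version ordering as compareTo, written as a Comparator.
    class ProjectVersionOrderingSketch {
      static final Comparator<ProjectVersion> BY_PROJECT_THEN_VERSION =
          Comparator.comparingInt(ProjectVersion::getProjectId)
              .thenComparingInt(ProjectVersion::getVersion);
    }
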
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/ServerStatisticsServlet.java b/azkaban-exec-server/src/main/java/azkaban/execapp/ServerStatisticsServlet.java
index b9658d4..2833f23 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/ServerStatisticsServlet.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/ServerStatisticsServlet.java
@@ -16,6 +16,8 @@
 
 package azkaban.execapp;
 
+import azkaban.executor.ExecutorInfo;
+import azkaban.utils.JSONUtils;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
@@ -25,14 +27,11 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.log4j.Logger;
 
-import azkaban.executor.ExecutorInfo;
-import azkaban.utils.JSONUtils;
-
 
 public class ServerStatisticsServlet extends HttpServlet {
+
   private static final long serialVersionUID = 1L;
   private static final int cacheTimeInMilliseconds = 1000;
   private static final Logger logger = Logger.getLogger(ServerStatisticsServlet.class);
@@ -50,12 +49,13 @@ public class ServerStatisticsServlet extends HttpServlet {
    * Handle all get request to Statistics Servlet {@inheritDoc}
    *
    * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest,
-   *      javax.servlet.http.HttpServletResponse)
+   * javax.servlet.http.HttpServletResponse)
    */
   @Override
-  protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
+      throws ServletException, IOException {
 
-    boolean noCache = null != req && Boolean.valueOf(req.getParameter(noCacheParamName));
+    final boolean noCache = null != req && Boolean.valueOf(req.getParameter(noCacheParamName));
 
     if (noCache || System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds) {
       this.populateStatistics(noCache);
@@ -66,25 +66,26 @@ public class ServerStatisticsServlet extends HttpServlet {
 
   /**
    * fill the result set with the percent of the remaining system memory on the server.
-   * @param stats reference to the result container which contains all the results, this specific method
-   *              will only work work on the property "remainingMemory" and "remainingMemoryPercent".
    *
-   * NOTE:
-   * a double value will be used to present the remaining memory,
-   *         a returning value of '55.6' means 55.6%
+   * @param stats reference to the result container which contains all the results, this specific
+   * method will only work on the properties "remainingMemory" and "remainingMemoryPercent".
+   *
+   * NOTE: a double value is used to represent the remaining memory; a return value of '55.6'
+   * means 55.6%
    */
-  protected void fillRemainingMemoryPercent(ExecutorInfo stats) {
+  protected void fillRemainingMemoryPercent(final ExecutorInfo stats) {
     if (exists_Bash && exists_Cat && exists_Grep && exists_Meminfo) {
-      java.lang.ProcessBuilder processBuilder =
+      final java.lang.ProcessBuilder processBuilder =
           new java.lang.ProcessBuilder("/bin/bash", "-c",
               "/bin/cat /proc/meminfo | grep -E \"^MemTotal:|^MemFree:|^Buffers:|^Cached:|^SwapCached:\"");
       try {
-        ArrayList<String> output = new ArrayList<String>();
-        Process process = processBuilder.start();
+        final ArrayList<String> output = new ArrayList<>();
+        final Process process = processBuilder.start();
         process.waitFor();
-        InputStream inputStream = process.getInputStream();
+        final InputStream inputStream = process.getInputStream();
         try {
-          java.io.BufferedReader reader = new java.io.BufferedReader(new InputStreamReader(inputStream));
+          final java.io.BufferedReader reader = new java.io.BufferedReader(
+              new InputStreamReader(inputStream));
           String line = null;
           while ((line = reader.readLine()) != null) {
             output.add(line);
@@ -107,7 +108,7 @@ public class ServerStatisticsServlet extends HttpServlet {
         // Note : total free memory = freeMemory + cached + buffers + swapCached
         // TODO : think about merging the logic in systemMemoryInfo as the logic is similar
         if (output.size() == 5) {
-          for (String result : output) {
+          for (final String result : output) {
             // find the total memory and value the variable.
             parsedResult = extractMemoryInfo("MemTotal", result);
             if (null != parsedResult) {
@@ -144,31 +145,35 @@ public class ServerStatisticsServlet extends HttpServlet {
             }
           }
         } else {
-          logger.error("failed to get total/free memory info as the bash call returned invalid result."
-              + String.format(" Output from the bash call - %s ", output.toString()));
+          logger.error(
+              "failed to get total/free memory info as the bash call returned invalid result."
+                  + String.format(" Output from the bash call - %s ", output.toString()));
         }
 
         // the number got from the proc file is in KBs we want to see the number in MBs so we are dividing it by 1024.
         stats.setRemainingMemoryInMB(totalFreeMemory / 1024);
-        stats.setRemainingMemoryPercent(totalMemory == 0 ? 0 : ((double) totalFreeMemory / (double) totalMemory) * 100);
-      } catch (Exception ex) {
+        stats.setRemainingMemoryPercent(
+            totalMemory == 0 ? 0 : ((double) totalFreeMemory / (double) totalMemory) * 100);
+      } catch (final Exception ex) {
         logger.error("failed fetch system memory info "
-            + "as exception is captured when fetching result from bash call. Ex -" + ex.getMessage());
+            + "as exception is captured when fetching result from bash call. Ex -" + ex
+            .getMessage());
       }
     } else {
-      logger.error("failed fetch system memory info, one or more files from the following list are missing -  "
-          + "'/bin/bash'," + "'/bin/cat'," + "'/proc/loadavg'");
+      logger.error(
+          "failed fetch system memory info, one or more files from the following list are missing -  "
+              + "'/bin/bash'," + "'/bin/cat'," + "'/proc/loadavg'");
     }
   }
 
-  private Long extractMemoryInfo(String field, String result) {
+  private Long extractMemoryInfo(final String field, final String result) {
     Long returnResult = null;
     if (null != result && null != field && result.matches(String.format("^%s:.*", field))
         && result.split("\\s+").length > 2) {
       try {
         returnResult = Long.parseLong(result.split("\\s+")[1]);
         logger.debug(field + ":" + returnResult);
-      } catch (NumberFormatException e) {
+      } catch (final NumberFormatException e) {
         returnResult = 0L;
         logger.error(String.format("yielding 0 for %s as output is invalid - %s", field, result));
       }
@@ -179,8 +184,8 @@ public class ServerStatisticsServlet extends HttpServlet {
   /**
    * call the data providers to fill the returning data container for statistics data.
    * This function refreshes the static cached copy of data in case if necessary.
-   * */
-  protected synchronized void populateStatistics(boolean noCache) {
+   */
+  protected synchronized void populateStatistics(final boolean noCache) {
     //check again before starting the work.
     if (noCache || System.currentTimeMillis() - lastRefreshedTime > cacheTimeInMilliseconds) {
       final ExecutorInfo stats = new ExecutorInfo();
@@ -196,15 +201,16 @@ public class ServerStatisticsServlet extends HttpServlet {
 
   /**
    * fill the result set with the remaining flow capacity .
-   * @param stats reference to the result container which contains all the results, this specific method
-   *              will only work on the property "remainingFlowCapacity".
+   *
+   * @param stats reference to the result container which contains all the results, this specific
+   * method will only work on the property "remainingFlowCapacity".
    */
-  protected void fillRemainingFlowCapacityAndLastDispatchedTime(ExecutorInfo stats) {
+  protected void fillRemainingFlowCapacityAndLastDispatchedTime(final ExecutorInfo stats) {
 
-    AzkabanExecutorServer server = AzkabanExecutorServer.getApp();
+    final AzkabanExecutorServer server = AzkabanExecutorServer.getApp();
     if (server != null) {
-      FlowRunnerManager runnerMgr = AzkabanExecutorServer.getApp().getFlowRunnerManager();
-      int assignedFlows = runnerMgr.getNumRunningFlows() + runnerMgr.getNumQueuedFlows();
+      final FlowRunnerManager runnerMgr = AzkabanExecutorServer.getApp().getFlowRunnerManager();
+      final int assignedFlows = runnerMgr.getNumRunningFlows() + runnerMgr.getNumQueuedFlows();
       stats.setRemainingFlowCapacity(runnerMgr.getMaxNumRunningFlows() - assignedFlows);
       stats.setNumberOfAssignedFlows(assignedFlows);
       stats.setLastDispatchedTime(runnerMgr.getLastFlowSubmittedTime());
@@ -214,26 +220,29 @@ public class ServerStatisticsServlet extends HttpServlet {
     }
   }
 
-  /**<pre>
+  /**
+   * <pre>
    * fill the result set with the CPU usage .
    * Note : As the 'Top' bash call doesn't yield accurate result for the system load,
    *        the implementation has been changed to load from the "proc/loadavg" which keeps
    *        the moving average of the system load, we are pulling the average for the recent 1 min.
-   *</pre>
-   * @param stats reference to the result container which contains all the results, this specific method
-   *              will only work on the property "cpuUsage".
+   * </pre>
+   *
+   * @param stats reference to the result container which contains all the results, this specific
+   * method will only work on the property "cpuUsage".
    */
-  protected void fillCpuUsage(ExecutorInfo stats) {
+  protected void fillCpuUsage(final ExecutorInfo stats) {
     if (exists_Bash && exists_Cat && exists_LoadAvg) {
-      java.lang.ProcessBuilder processBuilder =
+      final java.lang.ProcessBuilder processBuilder =
           new java.lang.ProcessBuilder("/bin/bash", "-c", "/bin/cat /proc/loadavg");
       try {
-        ArrayList<String> output = new ArrayList<String>();
-        Process process = processBuilder.start();
+        final ArrayList<String> output = new ArrayList<>();
+        final Process process = processBuilder.start();
         process.waitFor();
-        InputStream inputStream = process.getInputStream();
+        final InputStream inputStream = process.getInputStream();
         try {
-          java.io.BufferedReader reader = new java.io.BufferedReader(new InputStreamReader(inputStream));
+          final java.io.BufferedReader reader = new java.io.BufferedReader(
+              new InputStreamReader(inputStream));
           String line = null;
           while ((line = reader.readLine()) != null) {
             output.add(line);
@@ -244,24 +253,26 @@ public class ServerStatisticsServlet extends HttpServlet {
 
         // process the output from bash call.
         if (output.size() > 0) {
-          String[] splitedresult = output.get(0).split("\\s+");
+          final String[] splitedresult = output.get(0).split("\\s+");
           double cpuUsage = 0.0;
 
           try {
             cpuUsage = Double.parseDouble(splitedresult[0]);
-          } catch (NumberFormatException e) {
+          } catch (final NumberFormatException e) {
             logger.error("yielding 0.0 for CPU usage as output is invalid -" + output.get(0));
           }
           logger.info("System load : " + cpuUsage);
           stats.setCpuUpsage(cpuUsage);
         }
-      } catch (Exception ex) {
+      } catch (final Exception ex) {
         logger.error("failed fetch system load info "
-            + "as exception is captured when fetching result from bash call. Ex -" + ex.getMessage());
+            + "as exception is captured when fetching result from bash call. Ex -" + ex
+            .getMessage());
       }
     } else {
-      logger.error("failed fetch system load info, one or more files from the following list are missing -  "
-          + "'/bin/bash'," + "'/bin/cat'," + "'/proc/loadavg'");
+      logger.error(
+          "failed fetch system load info, one or more files from the following list are missing -  "
+              + "'/bin/bash'," + "'/bin/cat'," + "'/proc/loadavg'");
     }
   }
 }
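
fillCpuUsage() above shells out to "/bin/cat /proc/loadavg" and parses the first
whitespace-separated field, which on Linux is the 1-minute load average. A sketch of reading the
same file directly with java.nio instead of spawning bash; the class and method names are
hypothetical and the 0.0 fallback mirrors the servlet's behavior on bad output:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    // Sketch only: read /proc/loadavg without a bash subprocess (Linux /proc assumed).
    class LoadAverageSketch {
      static double oneMinuteLoadAverage() {
        try {
          String line = Files.readAllLines(Paths.get("/proc/loadavg")).get(0);
          return Double.parseDouble(line.split("\\s+")[0]);
        } catch (IOException | NumberFormatException | IndexOutOfBoundsException e) {
          return 0.0; // same fallback the servlet uses when the output is invalid
        }
      }
    }

The JDK's OperatingSystemMXBean#getSystemLoadAverage() exposes the same 1-minute figure without
touching /proc, at the cost of returning a negative value on platforms where it is unavailable.
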
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/StatsServlet.java b/azkaban-exec-server/src/main/java/azkaban/execapp/StatsServlet.java
index 9b9370e..b5bd2b6 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/StatsServlet.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/StatsServlet.java
@@ -16,6 +16,15 @@
 
 package azkaban.execapp;
 
+import azkaban.executor.ConnectorParams;
+import azkaban.metric.IMetric;
+import azkaban.metric.IMetricEmitter;
+import azkaban.metric.MetricReportManager;
+import azkaban.metric.TimeBasedReportingMetric;
+import azkaban.metric.inmemoryemitter.InMemoryHistoryNode;
+import azkaban.metric.inmemoryemitter.InMemoryMetricEmitter;
+import azkaban.server.HttpRequestUtils;
+import azkaban.utils.JSONUtils;
 import java.io.IOException;
 import java.text.DateFormat;
 import java.text.ParseException;
@@ -25,47 +34,36 @@ import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.log4j.Logger;
 
-import azkaban.executor.ConnectorParams;
-import azkaban.metric.IMetric;
-import azkaban.metric.IMetricEmitter;
-import azkaban.metric.MetricReportManager;
-import azkaban.metric.TimeBasedReportingMetric;
-import azkaban.metric.inmemoryemitter.InMemoryHistoryNode;
-import azkaban.metric.inmemoryemitter.InMemoryMetricEmitter;
-import azkaban.server.HttpRequestUtils;
-import azkaban.utils.JSONUtils;
-
 /**
  * Servlet to communicate with Azkaban exec server This servlet get requests
  * from stats servlet in Azkaban Web server
  */
 public class StatsServlet extends HttpServlet implements ConnectorParams {
+
   private static final long serialVersionUID = 2L;
   private static final Logger logger = Logger.getLogger(StatsServlet.class);
 
-  public boolean hasParam(HttpServletRequest request, String param) {
+  public boolean hasParam(final HttpServletRequest request, final String param) {
     return HttpRequestUtils.hasParam(request, param);
   }
 
-  public String getParam(HttpServletRequest request, String name)
+  public String getParam(final HttpServletRequest request, final String name)
       throws ServletException {
     return HttpRequestUtils.getParam(request, name);
   }
 
-  public Boolean getBooleanParam(HttpServletRequest request, String name)
+  public Boolean getBooleanParam(final HttpServletRequest request, final String name)
       throws ServletException {
     return HttpRequestUtils.getBooleanParam(request, name);
   }
 
-  public long getLongParam(HttpServletRequest request, String name)
+  public long getLongParam(final HttpServletRequest request, final String name)
       throws ServletException {
     return HttpRequestUtils.getLongParam(request, name);
   }
@@ -74,15 +72,15 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
    * Handle all get request to Stats Servlet {@inheritDoc}
    *
    * @see javax.servlet.http.HttpServlet#doGet(javax.servlet.http.HttpServletRequest,
-   *      javax.servlet.http.HttpServletResponse)
+   * javax.servlet.http.HttpServletResponse)
    */
   @Override
-  protected void doGet(HttpServletRequest req, HttpServletResponse resp)
+  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
       throws ServletException, IOException {
-    Map<String, Object> ret = new HashMap<String, Object>();
+    final Map<String, Object> ret = new HashMap<>();
 
     if (hasParam(req, ACTION_PARAM)) {
-      String action = getParam(req, ACTION_PARAM);
+      final String action = getParam(req, ACTION_PARAM);
       if (action.equals(STATS_SET_REPORTINGINTERVAL)) {
         handleChangeMetricInterval(req, ret);
       } else if (action.equals(STATS_SET_CLEANINGINTERVAL)) {
@@ -109,13 +107,13 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
    * enable or disable metric Manager A disable will also purge all data from
    * all metric emitters
    */
-  private void handleChangeManagerStatusRequest(HttpServletRequest req,
-      Map<String, Object> ret, boolean enableMetricManager) {
+  private void handleChangeManagerStatusRequest(final HttpServletRequest req,
+      final Map<String, Object> ret, final boolean enableMetricManager) {
     try {
       logger.info("Updating metric manager status");
       if ((enableMetricManager && MetricReportManager.isInstantiated())
           || MetricReportManager.isAvailable()) {
-        MetricReportManager metricManager = MetricReportManager.getInstance();
+        final MetricReportManager metricManager = MetricReportManager.getInstance();
         if (enableMetricManager) {
           metricManager.enableManager();
         } else {
@@ -125,7 +123,7 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
       } else {
         ret.put(RESPONSE_ERROR, "MetricManager is not available");
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       ret.put(RESPONSE_ERROR, e.getMessage());
     }
@@ -134,20 +132,20 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
   /**
    * Update number of display snapshots for /stats graphs
    */
-  private void handleChangeEmitterPoints(HttpServletRequest req,
-      Map<String, Object> ret) {
+  private void handleChangeEmitterPoints(final HttpServletRequest req,
+      final Map<String, Object> ret) {
     try {
-      long numInstance = getLongParam(req, STATS_MAP_EMITTERNUMINSTANCES);
+      final long numInstance = getLongParam(req, STATS_MAP_EMITTERNUMINSTANCES);
       if (MetricReportManager.isAvailable()) {
-        MetricReportManager metricManager = MetricReportManager.getInstance();
-        InMemoryMetricEmitter memoryEmitter =
+        final MetricReportManager metricManager = MetricReportManager.getInstance();
+        final InMemoryMetricEmitter memoryEmitter =
             extractInMemoryMetricEmitter(metricManager);
         memoryEmitter.setReportingInstances(numInstance);
         ret.put(STATUS_PARAM, RESPONSE_SUCCESS);
       } else {
         ret.put(RESPONSE_ERROR, "MetricManager is not available");
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       ret.put(RESPONSE_ERROR, e.getMessage());
     }
@@ -156,20 +154,20 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
   /**
    * Update InMemoryMetricEmitter interval to maintain metric snapshots
    */
-  private void handleChangeCleaningInterval(HttpServletRequest req,
-      Map<String, Object> ret) {
+  private void handleChangeCleaningInterval(final HttpServletRequest req,
+      final Map<String, Object> ret) {
     try {
-      long newInterval = getLongParam(req, STATS_MAP_CLEANINGINTERVAL);
+      final long newInterval = getLongParam(req, STATS_MAP_CLEANINGINTERVAL);
       if (MetricReportManager.isAvailable()) {
-        MetricReportManager metricManager = MetricReportManager.getInstance();
-        InMemoryMetricEmitter memoryEmitter =
+        final MetricReportManager metricManager = MetricReportManager.getInstance();
+        final InMemoryMetricEmitter memoryEmitter =
             extractInMemoryMetricEmitter(metricManager);
         memoryEmitter.setReportingInterval(newInterval);
         ret.put(STATUS_PARAM, RESPONSE_SUCCESS);
       } else {
         ret.put(RESPONSE_ERROR, "MetricManager is not available");
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       ret.put(RESPONSE_ERROR, e.getMessage());
     }
@@ -177,20 +175,18 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
 
   /**
    * Get metric snapshots for a metric and date specification
-   *
-   * @throws ServletException
    */
-  private void handleGetMetricHistory(HttpServletRequest req,
-      Map<String, Object> ret) throws ServletException {
+  private void handleGetMetricHistory(final HttpServletRequest req,
+      final Map<String, Object> ret) throws ServletException {
     if (MetricReportManager.isAvailable()) {
-      MetricReportManager metricManager = MetricReportManager.getInstance();
-      InMemoryMetricEmitter memoryEmitter =
+      final MetricReportManager metricManager = MetricReportManager.getInstance();
+      final InMemoryMetricEmitter memoryEmitter =
           extractInMemoryMetricEmitter(metricManager);
 
       // if we have a memory emitter
       if (memoryEmitter != null) {
         try {
-          List<InMemoryHistoryNode> result =
+          final List<InMemoryHistoryNode> result =
               memoryEmitter.getMetrics(
                   getParam(req, STATS_MAP_METRICNAMEPARAM),
                   parseDate(getParam(req, STATS_MAP_STARTDATE)),
@@ -203,7 +199,7 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
             ret.put(RESPONSE_ERROR, "No metric stats available");
           }
 
-        } catch (ParseException ex) {
+        } catch (final ParseException ex) {
           ret.put(RESPONSE_ERROR, "Invalid Date filter");
         }
       } else {
@@ -218,9 +214,9 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
    * Get InMemoryMetricEmitter, if available else null
    */
   private InMemoryMetricEmitter extractInMemoryMetricEmitter(
-      MetricReportManager metricManager) {
+      final MetricReportManager metricManager) {
     InMemoryMetricEmitter memoryEmitter = null;
-    for (IMetricEmitter emitter : metricManager.getMetricEmitters()) {
+    for (final IMetricEmitter emitter : metricManager.getMetricEmitters()) {
       if (emitter instanceof InMemoryMetricEmitter) {
         memoryEmitter = (InMemoryMetricEmitter) emitter;
         break;
@@ -232,16 +228,16 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
   /**
    * Get all the metrics tracked by metric manager
    */
-  private void handleGetAllMMetricsName(HttpServletRequest req,
-      Map<String, Object> ret) {
+  private void handleGetAllMMetricsName(final HttpServletRequest req,
+      final Map<String, Object> ret) {
     if (MetricReportManager.isAvailable()) {
-      MetricReportManager metricManager = MetricReportManager.getInstance();
-      List<IMetric<?>> result = metricManager.getAllMetrics();
+      final MetricReportManager metricManager = MetricReportManager.getInstance();
+      final List<IMetric<?>> result = metricManager.getAllMetrics();
       if (result.size() == 0) {
         ret.put(RESPONSE_ERROR, "No Metric being tracked");
       } else {
-        List<String> metricNames = new LinkedList<String>();
-        for (IMetric<?> metric : result) {
+        final List<String> metricNames = new LinkedList<>();
+        for (final IMetric<?> metric : result) {
           metricNames.add(metric.getName());
         }
         ret.put("data", metricNames);
@@ -253,17 +249,15 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
 
   /**
    * Update tracking interval for a given metrics
-   *
-   * @throws ServletException
    */
-  private void handleChangeMetricInterval(HttpServletRequest req,
-      Map<String, Object> ret) throws ServletException {
+  private void handleChangeMetricInterval(final HttpServletRequest req,
+      final Map<String, Object> ret) throws ServletException {
     try {
-      String metricName = getParam(req, STATS_MAP_METRICNAMEPARAM);
-      long newInterval = getLongParam(req, STATS_MAP_REPORTINGINTERVAL);
+      final String metricName = getParam(req, STATS_MAP_METRICNAMEPARAM);
+      final long newInterval = getLongParam(req, STATS_MAP_REPORTINGINTERVAL);
       if (MetricReportManager.isAvailable()) {
-        MetricReportManager metricManager = MetricReportManager.getInstance();
-        TimeBasedReportingMetric<?> metric =
+        final MetricReportManager metricManager = MetricReportManager.getInstance();
+        final TimeBasedReportingMetric<?> metric =
             (TimeBasedReportingMetric<?>) metricManager
                 .getMetricFromName(metricName);
         metric.updateInterval(newInterval);
@@ -271,14 +265,14 @@ public class StatsServlet extends HttpServlet implements ConnectorParams {
       } else {
         ret.put(RESPONSE_ERROR, "MetricManager is not available");
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       logger.error(e);
       ret.put(RESPONSE_ERROR, e.getMessage());
     }
   }
 
-  private Date parseDate(String date) throws ParseException {
-    DateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz");
+  private Date parseDate(final String date) throws ParseException {
+    final DateFormat format = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz");
     return format.parse(date);
   }
 }
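A note on the parseDate() change above: the reformat only adds final, but the parsing pattern is worth seeing end to end. Below is a minimal, standalone sketch (not Azkaban code) of parsing a date in the same "EEE, dd MMM yyyy HH:mm:ss zzz" format, and of the ParseException path that StatsServlet reports as "Invalid Date filter". Pinning Locale.US is an assumption added for this sketch so the sample input parses on any JVM.

import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;

public class DateFilterSketch {

  public static void main(final String[] args) {
    // Same pattern string as StatsServlet.parseDate(); Locale.US is pinned here
    // (an addition for this sketch) so "Thu"/"Jun"/"PDT" parse under any default locale.
    final DateFormat format =
        new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
    try {
      final Date date = format.parse("Thu, 01 Jun 2017 15:52:46 PDT");
      System.out.println("parsed: " + date);
    } catch (final ParseException ex) {
      // StatsServlet maps this case to ret.put(RESPONSE_ERROR, "Invalid Date filter").
      System.out.println("Invalid Date filter: " + ex.getMessage());
    }
  }
}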
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/Trigger.java b/azkaban-exec-server/src/main/java/azkaban/execapp/Trigger.java
index e1dfc73..6b09100 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/Trigger.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/Trigger.java
@@ -17,15 +17,15 @@
 
 package azkaban.execapp;
 
-import java.util.List;
-
 import azkaban.trigger.Condition;
 import azkaban.trigger.TriggerAction;
+import java.util.List;
 import org.apache.log4j.Logger;
 
 
 public class Trigger implements Runnable {
-  private static Logger logger = Logger.getLogger(azkaban.execapp.Trigger.class);
+
+  private static final Logger logger = Logger.getLogger(azkaban.execapp.Trigger.class);
   private final int execId;
 
   // condition to trigger actions(ex. flow running longer than X mins)
@@ -34,11 +34,10 @@ public class Trigger implements Runnable {
   private final Condition expireCondition;
   private final List<TriggerAction> actions;
 
-  public Trigger(int execId,
-                 Condition triggerCondition,
-                 Condition expireCondition,
-                 List<TriggerAction> actions)
-  {
+  public Trigger(final int execId,
+      final Condition triggerCondition,
+      final Condition expireCondition,
+      final List<TriggerAction> actions) {
     this.execId = execId;
     this.triggerCondition = triggerCondition;
     this.expireCondition = expireCondition;
@@ -51,19 +50,19 @@ public class Trigger implements Runnable {
    */
   @Override
   public void run() {
-    if(isTriggerExpired()) {
+    if (isTriggerExpired()) {
       logger.info(this + " expired");
-      return ;
+      return;
     }
 
-    boolean isTriggerConditionMet = triggerCondition.isMet();
+    final boolean isTriggerConditionMet = this.triggerCondition.isMet();
 
     if (isTriggerConditionMet) {
-      logger.info("Condition " + triggerCondition.getExpression() + " met");
-      for (TriggerAction action : actions) {
+      logger.info("Condition " + this.triggerCondition.getExpression() + " met");
+      for (final TriggerAction action : this.actions) {
         try {
           action.doAction();
-        } catch (Exception e) {
+        } catch (final Exception e) {
           logger.error("Failed to do action " + action.getDescription()
               + " for execution " + azkaban.execapp.Trigger.this.execId, e);
         }
@@ -73,21 +72,22 @@ public class Trigger implements Runnable {
 
   /**
    * Check if the trigger is expired and reset isExpired
+   *
    * @return true if trigger is expired
    */
   public boolean isTriggerExpired() {
-    return expireCondition.isMet();
+    return this.expireCondition.isMet();
   }
 
   public String toString() {
-    StringBuilder actionsString = new StringBuilder();
-    for (TriggerAction act : actions) {
+    final StringBuilder actionsString = new StringBuilder();
+    for (final TriggerAction act : this.actions) {
       actionsString.append(", ");
       actionsString.append(act.getDescription());
     }
 
-    return "Trigger for execution " + execId + " with trigger condition of "
-        + triggerCondition.getExpression() + " and expire condition of "
-        + expireCondition.getExpression() + actionsString;
+    return "Trigger for execution " + this.execId + " with trigger condition of "
+        + this.triggerCondition.getExpression() + " and expire condition of "
+        + this.expireCondition.getExpression() + actionsString;
   }
 }
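Trigger.run() above has a simple shape: return early if the expire condition is met, otherwise evaluate the trigger condition and attempt every action, logging per-action failures so one bad action does not stop the rest. The sketch below reproduces just that loop with a hypothetical Action interface; it illustrates the pattern only and is not Azkaban's Condition/TriggerAction API.

import java.util.Arrays;
import java.util.List;

public class TriggerLoopSketch {

  // Hypothetical stand-in for azkaban.trigger.TriggerAction, for illustration only.
  interface Action {
    String getDescription();
    void doAction() throws Exception;
  }

  public static void main(final String[] args) {
    final List<Action> actions = Arrays.asList(
        action("alert", false),
        action("kill flow", true),   // this one throws
        action("cleanup", false));

    // Mirrors Trigger.run(): each action is attempted; a failure is logged
    // and does not prevent the remaining actions from running.
    for (final Action action : actions) {
      try {
        action.doAction();
        System.out.println("did: " + action.getDescription());
      } catch (final Exception e) {
        System.out.println("Failed to do action " + action.getDescription() + ": " + e.getMessage());
      }
    }
  }

  private static Action action(final String desc, final boolean fail) {
    return new Action() {
      @Override
      public String getDescription() { return desc; }
      @Override
      public void doAction() throws Exception {
        if (fail) {
          throw new Exception("simulated failure");
        }
      }
    };
  }
}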
diff --git a/azkaban-exec-server/src/main/java/azkaban/execapp/TriggerManager.java b/azkaban-exec-server/src/main/java/azkaban/execapp/TriggerManager.java
index 1a7e394..a02301c 100644
--- a/azkaban-exec-server/src/main/java/azkaban/execapp/TriggerManager.java
+++ b/azkaban-exec-server/src/main/java/azkaban/execapp/TriggerManager.java
@@ -16,6 +16,7 @@
 
 package azkaban.execapp;
 
+import azkaban.execapp.action.KillExecutionAction;
 import azkaban.sla.SlaOption;
 import azkaban.trigger.Condition;
 import azkaban.trigger.ConditionChecker;
@@ -23,7 +24,6 @@ import azkaban.trigger.TriggerAction;
 import azkaban.trigger.builtin.SlaAlertAction;
 import azkaban.trigger.builtin.SlaChecker;
 import azkaban.utils.Utils;
-import azkaban.execapp.action.KillExecutionAction;
 import com.google.inject.Inject;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -37,8 +37,9 @@ import org.joda.time.ReadablePeriod;
 
 
 public class TriggerManager {
-  private static Logger logger = Logger.getLogger(TriggerManager.class);
+
   private static final int SCHEDULED_THREAD_POOL_SIZE = 4;
+  private static final Logger logger = Logger.getLogger(TriggerManager.class);
   private final ScheduledExecutorService scheduledService;
 
   @Inject
@@ -46,17 +47,18 @@ public class TriggerManager {
     this.scheduledService = Executors.newScheduledThreadPool(SCHEDULED_THREAD_POOL_SIZE);
   }
 
-  private Condition createCondition(SlaOption sla, int execId, String checkerName, String checkerMethod) {
-    SlaChecker slaFailChecker = new SlaChecker(checkerName, sla, execId);
-    Map<String, ConditionChecker> slaCheckers = new HashMap<>();
+  private Condition createCondition(final SlaOption sla, final int execId, final String checkerName,
+      final String checkerMethod) {
+    final SlaChecker slaFailChecker = new SlaChecker(checkerName, sla, execId);
+    final Map<String, ConditionChecker> slaCheckers = new HashMap<>();
     slaCheckers.put(slaFailChecker.getId(), slaFailChecker);
     return new Condition(slaCheckers, slaFailChecker.getId() + "." + checkerMethod);
   }
 
-  private List<TriggerAction> createActions(SlaOption sla, int execId) {
-    List<TriggerAction> actions = new ArrayList<>();
-    List<String> slaActions = sla.getActions();
-    for (String act : slaActions) {
+  private List<TriggerAction> createActions(final SlaOption sla, final int execId) {
+    final List<TriggerAction> actions = new ArrayList<>();
+    final List<String> slaActions = sla.getActions();
+    for (final String act : slaActions) {
       TriggerAction action = null;
       switch (act) {
         case SlaOption.ACTION_ALERT:
@@ -76,25 +78,27 @@ public class TriggerManager {
     return actions;
   }
 
-  public void addTrigger(int execId, List<SlaOption> slaOptions) {
-    for (SlaOption sla : slaOptions) {
-      Condition triggerCond = createCondition(sla, execId, "slaFailChecker", "isSlaFailed()");
+  public void addTrigger(final int execId, final List<SlaOption> slaOptions) {
+    for (final SlaOption sla : slaOptions) {
+      final Condition triggerCond = createCondition(sla, execId, "slaFailChecker", "isSlaFailed()");
 
       // if whole flow finish before violating sla, just expire the checker
-      Condition expireCond = createCondition(sla, execId, "slaPassChecker", "isSlaPassed()");
+      final Condition expireCond = createCondition(sla, execId, "slaPassChecker", "isSlaPassed()");
 
-      List<TriggerAction> actions = createActions(sla, execId);
-      Trigger trigger = new Trigger(execId, triggerCond, expireCond, actions);
+      final List<TriggerAction> actions = createActions(sla, execId);
+      final Trigger trigger = new Trigger(execId, triggerCond, expireCond, actions);
 
-      ReadablePeriod duration = Utils.parsePeriodString((String) sla.getInfo().get(SlaOption.INFO_DURATION));
-      long durationInMillis = duration.toPeriod().toStandardDuration().getMillis();
+      final ReadablePeriod duration = Utils
+          .parsePeriodString((String) sla.getInfo().get(SlaOption.INFO_DURATION));
+      final long durationInMillis = duration.toPeriod().toStandardDuration().getMillis();
 
-      logger.info("Adding sla trigger " + sla.toString() + " to execution " + execId + ", scheduled to trigger in " + durationInMillis/1000 + " seconds");
-      scheduledService.schedule(trigger, durationInMillis, TimeUnit.MILLISECONDS);
+      logger.info("Adding sla trigger " + sla.toString() + " to execution " + execId
+          + ", scheduled to trigger in " + durationInMillis / 1000 + " seconds");
+      this.scheduledService.schedule(trigger, durationInMillis, TimeUnit.MILLISECONDS);
     }
   }
 
   public void shutdown() {
-    scheduledService.shutdownNow();
+    this.scheduledService.shutdownNow();
   }
 }
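TriggerManager.addTrigger() above converts the SLA's duration into milliseconds and hands the Trigger to a four-thread ScheduledExecutorService. The following is a minimal JDK-only sketch of that scheduling pattern; the Runnable and the fixed 2-second delay are placeholders for the real Trigger and the parsed INFO_DURATION period.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

public class SlaScheduleSketch {

  public static void main(final String[] args) throws InterruptedException {
    // SCHEDULED_THREAD_POOL_SIZE is 4 in TriggerManager.
    final ScheduledExecutorService scheduledService =
        Executors.newScheduledThreadPool(4);

    // In TriggerManager the delay comes from parsing the SLA's INFO_DURATION
    // period and converting it to millis; a fixed value stands in here.
    final long durationInMillis = 2000;

    System.out.println("scheduled to trigger in " + durationInMillis / 1000 + " seconds");
    scheduledService.schedule(
        () -> System.out.println("trigger fired"),   // placeholder for Trigger.run()
        durationInMillis, TimeUnit.MILLISECONDS);

    // TriggerManager.shutdown() calls shutdownNow(); here we shut down gracefully
    // and wait so the delayed task gets a chance to run before the pool terminates.
    scheduledService.shutdown();
    scheduledService.awaitTermination(5, TimeUnit.SECONDS);
  }
}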
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/AzkabanExecutorServerTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/AzkabanExecutorServerTest.java
index ad62985..137f7ed 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/AzkabanExecutorServerTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/AzkabanExecutorServerTest.java
@@ -17,6 +17,10 @@
 
 package azkaban.execapp;
 
+import static java.util.Objects.requireNonNull;
+import static org.apache.commons.io.FileUtils.deleteQuietly;
+import static org.junit.Assert.assertNotNull;
+
 import azkaban.AzkabanCommonModule;
 import azkaban.Constants;
 import azkaban.database.AzkabanDatabaseSetup;
@@ -33,12 +37,9 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import static java.util.Objects.*;
-import static org.apache.commons.io.FileUtils.*;
-import static org.junit.Assert.*;
-
 
 public class AzkabanExecutorServerTest {
+
   public static final String AZKABAN_LOCAL_TEST_STORAGE = "AZKABAN_LOCAL_TEST_STORAGE";
   public static final String AZKABAN_DB_SQL_PATH = "azkaban-db/src/main/sql";
 
@@ -46,18 +47,19 @@ public class AzkabanExecutorServerTest {
 
   private static String getSqlScriptsDir() throws IOException {
     // Dummy because any resource file works.
-    URL resource = AzkabanExecutorServerTest.class.getClassLoader().getResource("test.file");
+    final URL resource = AzkabanExecutorServerTest.class.getClassLoader().getResource("test.file");
     final String dummyResourcePath = requireNonNull(resource).getPath();
-    Path resources = Paths.get(dummyResourcePath).getParent();
-    Path azkabanRoot = resources.getParent().getParent().getParent().getParent();
+    final Path resources = Paths.get(dummyResourcePath).getParent();
+    final Path azkabanRoot = resources.getParent().getParent().getParent().getParent();
 
-    File sqlScriptDir = Paths.get(azkabanRoot.toString(), AZKABAN_DB_SQL_PATH).toFile();
-    return props.getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, sqlScriptDir.getCanonicalPath());
+    final File sqlScriptDir = Paths.get(azkabanRoot.toString(), AZKABAN_DB_SQL_PATH).toFile();
+    return props
+        .getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, sqlScriptDir.getCanonicalPath());
   }
 
   @BeforeClass
   public static void setUp() throws Exception {
-    String sqlScriptsDir = getSqlScriptsDir();
+    final String sqlScriptsDir = getSqlScriptsDir();
     props.put(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, sqlScriptsDir);
 
     props.put("database.type", "h2");
@@ -77,9 +79,10 @@ public class AzkabanExecutorServerTest {
 
   @Test
   public void testInjection() throws Exception {
-    props.put(Constants.ConfigurationKeys.AZKABAN_STORAGE_LOCAL_BASEDIR, AZKABAN_LOCAL_TEST_STORAGE);
+    props
+        .put(Constants.ConfigurationKeys.AZKABAN_STORAGE_LOCAL_BASEDIR, AZKABAN_LOCAL_TEST_STORAGE);
 
-    Injector injector = Guice.createInjector(
+    final Injector injector = Guice.createInjector(
         new AzkabanCommonModule(props),
         new AzkabanExecServerModule()
     );
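testInjection() above wires the executor server together with Guice.createInjector(...) and asserts the object graph resolves. The standalone sketch below shows that same createInjector/getInstance shape; GreetingService and FakeModule are invented for illustration and are not Azkaban types.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class InjectionSketch {

  // Hypothetical service and module, standing in for AzkabanCommonModule /
  // AzkabanExecServerModule purely to show the wiring pattern.
  interface GreetingService {
    String greet(String name);
  }

  static class FakeModule extends AbstractModule {
    @Override
    protected void configure() {
      bind(GreetingService.class).toInstance(name -> "hello " + name);
    }
  }

  public static void main(final String[] args) {
    final Injector injector = Guice.createInjector(new FakeModule());
    final GreetingService service = injector.getInstance(GreetingService.class);
    // The test's assertNotNull(...) plays the same role as this check.
    if (service == null) {
      throw new AssertionError("injection failed");
    }
    System.out.println(service.greet("azkaban"));
  }
}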
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/event/BlockingStatusTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/event/BlockingStatusTest.java
index ece8677..9ffc496 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/event/BlockingStatusTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/event/BlockingStatusTest.java
@@ -16,47 +16,27 @@
 
 package azkaban.execapp.event;
 
+import azkaban.executor.Status;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import azkaban.executor.Status;
-
 public class BlockingStatusTest {
 
-  public static class WatchingThread extends Thread {
-    private BlockingStatus status;
-    private long diff = 0;
-
-    public WatchingThread(BlockingStatus status) {
-      this.status = status;
-    }
-
-    @Override
-    public void run() {
-      long startTime = System.currentTimeMillis();
-      status.blockOnFinishedStatus();
-      diff = System.currentTimeMillis() - startTime;
-    }
-
-    public long getDiff() {
-      return diff;
-    }
-  }
-
   /**
-    * TODO: Ignore this test at present since travis in Github can not always pass this test.
-    *       We will modify the below code to make travis pass in future.
-    */
-  @Ignore @Test
+   * TODO: Ignore this test at present since travis in Github can not always pass this test.
+   * We will modify the below code to make travis pass in future.
+   */
+  @Ignore
+  @Test
   public void testFinishedBlock() {
-    BlockingStatus status = new BlockingStatus(1, "test", Status.SKIPPED);
+    final BlockingStatus status = new BlockingStatus(1, "test", Status.SKIPPED);
 
-    WatchingThread thread = new WatchingThread(status);
+    final WatchingThread thread = new WatchingThread(status);
     thread.start();
     try {
       thread.join();
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       e.printStackTrace();
     }
     System.out.println("Diff " + thread.getDiff());
@@ -65,13 +45,14 @@ public class BlockingStatusTest {
 
   /**
    * TODO: Ignore this test at present since travis in Github can not always pass this test.
-   *       We will modify the below code to make travis pass in future.
+   * We will modify the below code to make travis pass in future.
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testUnfinishedBlock() throws InterruptedException {
-    BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+    final BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
 
-    WatchingThread thread = new WatchingThread(status);
+    final WatchingThread thread = new WatchingThread(status);
     thread.start();
 
     Thread.sleep(3000);
@@ -85,13 +66,14 @@ public class BlockingStatusTest {
 
   /**
    * TODO: Ignore this test at present since travis in Github can not always pass this test.
-   *       We will modify the below code to make travis pass in future.
+   * We will modify the below code to make travis pass in future.
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testUnfinishedBlockSeveralChanges() throws InterruptedException {
-    BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+    final BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
 
-    WatchingThread thread = new WatchingThread(status);
+    final WatchingThread thread = new WatchingThread(status);
     thread.start();
 
     Thread.sleep(3000);
@@ -108,18 +90,19 @@ public class BlockingStatusTest {
 
   /**
    * TODO: Ignore this test at present since travis in Github can not always pass this test.
-   *       We will modify the below code to make travis pass in future.
+   * We will modify the below code to make travis pass in future.
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testMultipleWatchers() throws InterruptedException {
-    BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
+    final BlockingStatus status = new BlockingStatus(1, "test", Status.QUEUED);
 
-    WatchingThread thread1 = new WatchingThread(status);
+    final WatchingThread thread1 = new WatchingThread(status);
     thread1.start();
 
     Thread.sleep(2000);
 
-    WatchingThread thread2 = new WatchingThread(status);
+    final WatchingThread thread2 = new WatchingThread(status);
     thread2.start();
 
     Thread.sleep(2000);
@@ -132,4 +115,25 @@ public class BlockingStatusTest {
     Assert.assertTrue(thread1.getDiff() >= 4000 && thread1.getDiff() < 4200);
     Assert.assertTrue(thread2.getDiff() >= 2000 && thread2.getDiff() < 2200);
   }
+
+  public static class WatchingThread extends Thread {
+
+    private final BlockingStatus status;
+    private long diff = 0;
+
+    public WatchingThread(final BlockingStatus status) {
+      this.status = status;
+    }
+
+    @Override
+    public void run() {
+      final long startTime = System.currentTimeMillis();
+      this.status.blockOnFinishedStatus();
+      this.diff = System.currentTimeMillis() - startTime;
+    }
+
+    public long getDiff() {
+      return this.diff;
+    }
+  }
 }
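The WatchingThread moved to the bottom of BlockingStatusTest measures how long blockOnFinishedStatus() keeps a thread parked, and the (currently @Ignore'd) tests assert on that elapsed time. The sketch below shows the same measure-the-block idea with a CountDownLatch standing in for BlockingStatus; the latch and the 2-second sleep are assumptions made for a self-contained example.

import java.util.concurrent.CountDownLatch;

public class BlockingMeasureSketch {

  static class WatchingThread extends Thread {

    private final CountDownLatch latch;
    private volatile long diff = 0;

    WatchingThread(final CountDownLatch latch) {
      this.latch = latch;
    }

    @Override
    public void run() {
      final long startTime = System.currentTimeMillis();
      try {
        this.latch.await();            // stands in for status.blockOnFinishedStatus()
      } catch (final InterruptedException e) {
        Thread.currentThread().interrupt();
      }
      this.diff = System.currentTimeMillis() - startTime;
    }

    long getDiff() {
      return this.diff;
    }
  }

  public static void main(final String[] args) throws InterruptedException {
    final CountDownLatch finished = new CountDownLatch(1);
    final WatchingThread thread = new WatchingThread(finished);
    thread.start();

    Thread.sleep(2000);                // keep the watcher blocked for ~2 seconds
    finished.countDown();              // stands in for a status change to a finished state
    thread.join();

    // The real tests assert a window, e.g. >= 2000 and < 2200 ms.
    System.out.println("Diff " + thread.getDiff());
  }
}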
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackRequestMakerTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackRequestMakerTest.java
index 9c654b9..62e995a 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackRequestMakerTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackRequestMakerTest.java
@@ -10,6 +10,9 @@ import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_PROJECT_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_SERVER_TOKEN;
 
+import azkaban.jobcallback.JobCallbackConstants;
+import azkaban.jobcallback.JobCallbackStatusEnum;
+import azkaban.utils.Props;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.Writer;
@@ -17,12 +20,10 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
-
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-
 import org.apache.http.client.methods.HttpRequestBase;
 import org.apache.log4j.Logger;
 import org.junit.AfterClass;
@@ -32,10 +33,6 @@ import org.mortbay.jetty.Server;
 import org.mortbay.jetty.servlet.Context;
 import org.mortbay.jetty.servlet.ServletHolder;
 
-import azkaban.jobcallback.JobCallbackConstants;
-import azkaban.jobcallback.JobCallbackStatusEnum;
-import azkaban.utils.Props;
-
 public class JobCallbackRequestMakerTest {
 
   private static final Logger logger = Logger
@@ -60,15 +57,15 @@ public class JobCallbackRequestMakerTest {
 
   @BeforeClass
   public static void setup() throws Exception {
-    Props props = new Props();
-    int timeout = 50;
+    final Props props = new Props();
+    final int timeout = 50;
     props.put(JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT, timeout);
     props.put(JOBCALLBACK_CONNECTION_TIMEOUT, timeout);
     props.put(JOBCALLBACK_SOCKET_TIMEOUT, timeout);
     JobCallbackRequestMaker.initialize(props);
     jobCBMaker = JobCallbackRequestMaker.getInstance();
 
-    contextInfo = new HashMap<String, String>();
+    contextInfo = new HashMap<>();
     contextInfo.put(CONTEXT_SERVER_TOKEN, SERVER_NAME);
     contextInfo.put(CONTEXT_PROJECT_TOKEN, PROJECT_NANE);
     contextInfo.put(CONTEXT_FLOW_TOKEN, FLOW_NANE);
@@ -78,7 +75,7 @@ public class JobCallbackRequestMakerTest {
 
     embeddedJettyServer = new Server(PORT_NUMBER);
 
-    Context context = new Context(embeddedJettyServer, "/", Context.SESSIONS);
+    final Context context = new Context(embeddedJettyServer, "/", Context.SESSIONS);
     context.addServlet(new ServletHolder(new DelayServlet()), "/delay");
 
     System.out.println("Start server");
@@ -94,42 +91,115 @@ public class JobCallbackRequestMakerTest {
     }
   }
 
+  private String buildUrlForDelay(final int delay) {
+    return "http://localhost:" + PORT_NUMBER + "/delay?" + SLEEP_DURATION_PARAM
+        + "=" + delay;
+  }
+
+  private String buildUrlForStatusCode(final int sc) {
+    return "http://localhost:" + PORT_NUMBER + "/delay?" + STATUS_CODE_PARAM
+        + "=" + sc;
+  }
+
+  @Test(timeout = 4000)
+  public void basicGetTest() {
+    final Props props = new Props();
+    final String url = buildUrlForDelay(1);
+
+    props.put("job.notification."
+        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
+
+    final List<HttpRequestBase> httpRequestList =
+        JobCallbackUtil.parseJobCallbackProperties(props,
+            JobCallbackStatusEnum.STARTED, contextInfo, 3);
+
+    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
+  }
+
+  @Test(timeout = 4000)
+  public void simulateNotOKStatusCodeTest() {
+    final Props props = new Props();
+    final String url = buildUrlForStatusCode(404);
+    props.put("job.notification."
+        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
+
+    final List<HttpRequestBase> httpRequestList =
+        JobCallbackUtil.parseJobCallbackProperties(props,
+            JobCallbackStatusEnum.STARTED, contextInfo, 3);
+
+    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
+  }
+
+  @Test(timeout = 4000)
+  public void unResponsiveGetTest() {
+    final Props props = new Props();
+    final String url = buildUrlForDelay(10);
+    props.put("job.notification."
+        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
+
+    final List<HttpRequestBase> httpRequestList =
+        JobCallbackUtil.parseJobCallbackProperties(props,
+            JobCallbackStatusEnum.STARTED, contextInfo, 3);
+
+    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
+  }
+
+  @Test(timeout = 4000)
+  public void basicPostTest() {
+    final Props props = new Props();
+    final String url = buildUrlForDelay(1);
+    props.put("job.notification."
+        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
+    props.put("job.notification."
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.method",
+        JobCallbackConstants.HTTP_POST);
+    props.put("job.notification."
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.body",
+        "This is it");
+
+    final List<HttpRequestBase> httpRequestList =
+        JobCallbackUtil.parseJobCallbackProperties(props,
+            JobCallbackStatusEnum.STARTED, contextInfo, 3);
+
+    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
+  }
+
   private static class DelayServlet extends HttpServlet {
 
     @Override
-    public void doGet(HttpServletRequest req, HttpServletResponse resp)
+    public void doGet(final HttpServletRequest req, final HttpServletResponse resp)
         throws ServletException, IOException {
 
       logger.info("Get get request: " + req.getRequestURI());
       logger.info("Get get request params: " + req.getParameterMap());
 
-      long start = System.currentTimeMillis();
+      final long start = System.currentTimeMillis();
       String responseMessage = handleDelay(req);
       logger
           .info("handleDelay elapse: " + (System.currentTimeMillis() - start));
 
       responseMessage = handleSimulatedStatusCode(req, resp, responseMessage);
 
-      Writer writer = resp.getWriter();
+      final Writer writer = resp.getWriter();
       writer.write(responseMessage);
       writer.close();
     }
 
-    private String handleSimulatedStatusCode(HttpServletRequest req,
-        HttpServletResponse resp, String responseMessge) {
-      String returnedStatusCodeStr = req.getParameter(STATUS_CODE_PARAM);
+    private String handleSimulatedStatusCode(final HttpServletRequest req,
+        final HttpServletResponse resp, String responseMessge) {
+      final String returnedStatusCodeStr = req.getParameter(STATUS_CODE_PARAM);
       if (returnedStatusCodeStr != null) {
-        int statusCode = Integer.parseInt(returnedStatusCodeStr);
+        final int statusCode = Integer.parseInt(returnedStatusCodeStr);
         responseMessge = "Not good";
         resp.setStatus(statusCode);
       }
       return responseMessge;
     }
 
-    private String handleDelay(HttpServletRequest req) {
-      String sleepParamValue = req.getParameter(SLEEP_DURATION_PARAM);
+    private String handleDelay(final HttpServletRequest req) {
+      final String sleepParamValue = req.getParameter(SLEEP_DURATION_PARAM);
       if (sleepParamValue != null) {
-        long howLongMS =
+        final long howLongMS =
             TimeUnit.MILLISECONDS.convert(Integer.parseInt(sleepParamValue),
                 TimeUnit.SECONDS);
 
@@ -138,7 +208,7 @@ public class JobCallbackRequestMakerTest {
         try {
           Thread.sleep(howLongMS);
           return "Voila!!";
-        } catch (InterruptedException e) {
+        } catch (final InterruptedException e) {
           // don't care
           return e.getMessage();
         }
@@ -147,12 +217,12 @@ public class JobCallbackRequestMakerTest {
     }
 
     @Override
-    public void doPost(HttpServletRequest req, HttpServletResponse resp)
+    public void doPost(final HttpServletRequest req, final HttpServletResponse resp)
         throws ServletException, IOException {
       logger.info("Get post request: " + req.getRequestURI());
       logger.info("Get post request params: " + req.getParameterMap());
 
-      BufferedReader reader = req.getReader();
+      final BufferedReader reader = req.getReader();
       String line = null;
       while ((line = reader.readLine()) != null) {
         logger.info("post body: " + line);
@@ -162,82 +232,9 @@ public class JobCallbackRequestMakerTest {
       String responseMessage = handleDelay(req);
       responseMessage = handleSimulatedStatusCode(req, resp, responseMessage);
 
-      Writer writer = resp.getWriter();
+      final Writer writer = resp.getWriter();
       writer.write(responseMessage);
       writer.close();
     }
   }
-
-  private String buildUrlForDelay(int delay) {
-    return "http://localhost:" + PORT_NUMBER + "/delay?" + SLEEP_DURATION_PARAM
-        + "=" + delay;
-  }
-
-  private String buildUrlForStatusCode(int sc) {
-    return "http://localhost:" + PORT_NUMBER + "/delay?" + STATUS_CODE_PARAM
-        + "=" + sc;
-  }
-
-  @Test(timeout = 4000)
-  public void basicGetTest() {
-    Props props = new Props();
-    String url = buildUrlForDelay(1);
-
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-
-    List<HttpRequestBase> httpRequestList =
-        JobCallbackUtil.parseJobCallbackProperties(props,
-            JobCallbackStatusEnum.STARTED, contextInfo, 3);
-
-    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
-  }
-
-  @Test(timeout = 4000)
-  public void simulateNotOKStatusCodeTest() {
-    Props props = new Props();
-    String url = buildUrlForStatusCode(404);
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-
-    List<HttpRequestBase> httpRequestList =
-        JobCallbackUtil.parseJobCallbackProperties(props,
-            JobCallbackStatusEnum.STARTED, contextInfo, 3);
-
-    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
-  }
-
-  @Test(timeout = 4000)
-  public void unResponsiveGetTest() {
-    Props props = new Props();
-    String url = buildUrlForDelay(10);
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-
-    List<HttpRequestBase> httpRequestList =
-        JobCallbackUtil.parseJobCallbackProperties(props,
-            JobCallbackStatusEnum.STARTED, contextInfo, 3);
-
-    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
-  }
-
-  @Test(timeout = 4000)
-  public void basicPostTest() {
-    Props props = new Props();
-    String url = buildUrlForDelay(1);
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.method",
-        JobCallbackConstants.HTTP_POST);
-    props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.body",
-        "This is it");
-
-    List<HttpRequestBase> httpRequestList =
-        JobCallbackUtil.parseJobCallbackProperties(props,
-            JobCallbackStatusEnum.STARTED, contextInfo, 3);
-
-    jobCBMaker.makeHttpRequest(JOB_NANE, logger, httpRequestList);
-  }
 }
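The helper methods and @Test cases relocated above all boil down to composing a callback URL plus "job.notification.<status>.<n>.url/.method/.body" keys before calling JobCallbackUtil.parseJobCallbackProperties. The sketch below shows only that key/URL construction; the port value and the sleepDuration parameter name are stand-ins, since the test's PORT_NUMBER and SLEEP_DURATION_PARAM constants are defined outside this diff.

import java.util.ArrayList;
import java.util.List;

public class CallbackPropsSketch {

  private static final int PORT_NUMBER = 8989;                 // illustrative value only
  private static final String SLEEP_DURATION_PARAM = "sleepDuration";  // stand-in name

  private static String buildUrlForDelay(final int delay) {
    return "http://localhost:" + PORT_NUMBER + "/delay?" + SLEEP_DURATION_PARAM + "=" + delay;
  }

  private static String notificationKey(final String status, final int seq, final String field) {
    // Same shape as the keys the test puts into Props:
    // job.notification.started.1.url, job.notification.started.1.method, ...
    return "job.notification." + status.toLowerCase() + "." + seq + "." + field;
  }

  public static void main(final String[] args) {
    final List<String> entries = new ArrayList<>();
    entries.add(notificationKey("STARTED", 1, "url") + " = " + buildUrlForDelay(1));
    entries.add(notificationKey("STARTED", 1, "method") + " = POST");
    entries.add(notificationKey("STARTED", 1, "body") + " = This is it");

    for (final String entry : entries) {
      System.out.println(entry);
    }
  }
}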
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackUtilTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackUtilTest.java
index 646f6e1..228c7e5 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackUtilTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/event/JobCallbackUtilTest.java
@@ -2,18 +2,20 @@ package azkaban.execapp.event;
 
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_EXECUTION_ID_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_FLOW_TOKEN;
-import static azkaban.jobcallback.JobCallbackConstants.HTTP_GET;
-import static azkaban.jobcallback.JobCallbackConstants.HTTP_POST;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_STATUS_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_PROJECT_TOKEN;
 import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_SERVER_TOKEN;
+import static azkaban.jobcallback.JobCallbackConstants.HTTP_GET;
+import static azkaban.jobcallback.JobCallbackConstants.HTTP_POST;
 
+import azkaban.jobcallback.JobCallbackConstants;
+import azkaban.jobcallback.JobCallbackStatusEnum;
+import azkaban.utils.Props;
 import java.net.URLEncoder;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-
 import org.apache.http.Header;
 import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpRequestBase;
@@ -21,12 +23,7 @@ import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import azkaban.jobcallback.JobCallbackConstants;
-import azkaban.jobcallback.JobCallbackStatusEnum;
-import azkaban.utils.Props;
-
 public class JobCallbackUtilTest {
-  private static Map<String, String> contextInfo;
 
   private static final String SERVER_NAME = "localhost:9999";
   private static final String PROJECT_NAME = "PROJECTX";
@@ -35,10 +32,11 @@ public class JobCallbackUtilTest {
   private static final String EXECUTION_ID = "1234";
   private static final String JOB_STATUS_NAME = JobCallbackStatusEnum.STARTED
       .name();
+  private static Map<String, String> contextInfo;
 
   @BeforeClass
   public static void setup() {
-    contextInfo = new HashMap<String, String>();
+    contextInfo = new HashMap<>();
     contextInfo.put(CONTEXT_SERVER_TOKEN, SERVER_NAME);
     contextInfo.put(CONTEXT_PROJECT_TOKEN, PROJECT_NAME);
     contextInfo.put(CONTEXT_FLOW_TOKEN, FLOW_NAME);
@@ -49,7 +47,7 @@ public class JobCallbackUtilTest {
 
   @Test
   public void noCallbackPropertiesTest() {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("abc", "def");
 
     Assert.assertFalse(JobCallbackUtil.isThereJobCallbackProperty(props,
@@ -67,8 +65,8 @@ public class JobCallbackUtilTest {
 
   @Test
   public void hasCallbackPropertiesTest() {
-    Props props = new Props();
-    for (JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
+    final Props props = new Props();
+    for (final JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
       props.put(
           "job.notification." + jobStatus.name().toLowerCase() + ".1.url",
           "def");
@@ -91,7 +89,7 @@ public class JobCallbackUtilTest {
 
   @Test
   public void multipleStatusWithNoJobCallbackTest() {
-    Props props = new Props();
+    final Props props = new Props();
     props.put("abc", "def");
 
     Assert.assertFalse(JobCallbackUtil.isThereJobCallbackProperty(props,
@@ -112,7 +110,7 @@ public class JobCallbackUtilTest {
 
     props = new Props();
     props.put("job.notification."
-        + JobCallbackStatusEnum.COMPLETED.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.COMPLETED.name().toLowerCase() + ".1.url",
         "def");
     Assert.assertTrue(JobCallbackUtil.isThereJobCallbackProperty(props,
         JobCallbackStatusEnum.STARTED, JobCallbackStatusEnum.COMPLETED,
@@ -135,8 +133,8 @@ public class JobCallbackUtilTest {
 
   @Test
   public void hasCallbackPropertiesWithGapTest() {
-    Props props = new Props();
-    for (JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
+    final Props props = new Props();
+    for (final JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
       props.put(
           "job.notification." + jobStatus.name().toLowerCase() + ".2.url",
           "def");
@@ -159,8 +157,8 @@ public class JobCallbackUtilTest {
 
   @Test
   public void noTokenTest() {
-    String urlWithNoToken = "http://www.linkedin.com";
-    String result =
+    final String urlWithNoToken = "http://www.linkedin.com";
+    final String result =
         JobCallbackUtil.replaceTokens(urlWithNoToken, contextInfo, true);
     Assert.assertEquals(urlWithNoToken, result);
   }
@@ -168,10 +166,10 @@ public class JobCallbackUtilTest {
   @Test
   public void oneTokenTest() {
 
-    String urlWithOneToken =
+    final String urlWithOneToken =
         "http://www.linkedin.com?project=" + CONTEXT_PROJECT_TOKEN + "&another=yes";
 
-    String result =
+    final String result =
         JobCallbackUtil.replaceTokens(urlWithOneToken, contextInfo, true);
     Assert.assertEquals("http://www.linkedin.com?project=" + PROJECT_NAME
         + "&another=yes", result);
@@ -180,11 +178,11 @@ public class JobCallbackUtilTest {
   @Test
   public void twoTokensTest() {
 
-    String urlWithOneToken =
+    final String urlWithOneToken =
         "http://www.linkedin.com?project=" + CONTEXT_PROJECT_TOKEN + "&flow="
             + CONTEXT_FLOW_TOKEN;
 
-    String result =
+    final String result =
         JobCallbackUtil.replaceTokens(urlWithOneToken, contextInfo, true);
     Assert.assertEquals("http://www.linkedin.com?project=" + PROJECT_NAME
         + "&flow=" + FLOW_NAME, result);
@@ -193,16 +191,16 @@ public class JobCallbackUtilTest {
   @Test
   public void allTokensTest() {
 
-    String urlWithOneToken =
+    final String urlWithOneToken =
         "http://www.linkedin.com?server=" + SERVER_NAME + "&project="
             + CONTEXT_PROJECT_TOKEN + "&flow=" + CONTEXT_FLOW_TOKEN + "&executionId="
             + CONTEXT_EXECUTION_ID_TOKEN + "&job=" + CONTEXT_JOB_TOKEN + "&status="
             + CONTEXT_JOB_STATUS_TOKEN;
 
-    String result =
+    final String result =
         JobCallbackUtil.replaceTokens(urlWithOneToken, contextInfo, true);
 
-    String expectedResult =
+    final String expectedResult =
         "http://www.linkedin.com?server=" + SERVER_NAME + "&project="
             + PROJECT_NAME + "&flow=" + FLOW_NAME + "&executionId="
             + EXECUTION_ID + "&job=" + JOB_NAME + "&status=" + JOB_STATUS_NAME;
@@ -212,11 +210,11 @@ public class JobCallbackUtilTest {
 
   @Test
   public void tokenWithEncoding() throws Exception {
-    String jobNameWithSpaces = "my job";
-    String encodedJobName = URLEncoder.encode(jobNameWithSpaces, "UTF-8");
+    final String jobNameWithSpaces = "my job";
+    final String encodedJobName = URLEncoder.encode(jobNameWithSpaces, "UTF-8");
 
-    Map<String, String> customContextInfo = new HashMap<String, String>();
-    customContextInfo = new HashMap<String, String>();
+    Map<String, String> customContextInfo = new HashMap<>();
+    customContextInfo = new HashMap<>();
     customContextInfo.put(CONTEXT_SERVER_TOKEN, SERVER_NAME);
     customContextInfo.put(CONTEXT_PROJECT_TOKEN, PROJECT_NAME);
     customContextInfo.put(CONTEXT_FLOW_TOKEN, FLOW_NAME);
@@ -224,10 +222,10 @@ public class JobCallbackUtilTest {
     customContextInfo.put(CONTEXT_JOB_TOKEN, jobNameWithSpaces);
     customContextInfo.put(CONTEXT_JOB_STATUS_TOKEN, JOB_STATUS_NAME);
 
-    String urlWithOneToken =
+    final String urlWithOneToken =
         "http://www.linkedin.com?job=" + CONTEXT_JOB_TOKEN + "&flow=" + CONTEXT_FLOW_TOKEN;
 
-    String result =
+    final String result =
         JobCallbackUtil.replaceTokens(urlWithOneToken, customContextInfo, true);
     Assert.assertEquals("http://www.linkedin.com?job=" + encodedJobName
         + "&flow=" + FLOW_NAME, result);
@@ -235,11 +233,11 @@ public class JobCallbackUtilTest {
 
   @Test
   public void parseJobCallbackOneGetTest() {
-    Props props = new Props();
-    String url = "http://lva1-rpt07.corp.linkedin.com";
+    final Props props = new Props();
+    final String url = "http://lva1-rpt07.corp.linkedin.com";
     props.put("job.notification."
         + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-    List<HttpRequestBase> result =
+    final List<HttpRequestBase> result =
         JobCallbackUtil.parseJobCallbackProperties(props,
             JobCallbackStatusEnum.STARTED, contextInfo, 3);
 
@@ -250,11 +248,11 @@ public class JobCallbackUtilTest {
 
   @Test
   public void parseJobCallbackWithInvalidURLTest() {
-    Props props = new Props();
-    String url = "linkedin.com";
+    final Props props = new Props();
+    final String url = "linkedin.com";
     props.put("job.notification."
         + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
-    List<HttpRequestBase> result =
+    final List<HttpRequestBase> result =
         JobCallbackUtil.parseJobCallbackProperties(props,
             JobCallbackStatusEnum.STARTED, contextInfo, 3);
 
@@ -265,17 +263,17 @@ public class JobCallbackUtilTest {
 
   @Test
   public void parseJobCallbackTwoGetsTest() {
-    Props props = new Props();
-    String[] urls =
-        { "http://lva1-rpt07.corp.linkedin.com",
-            "http://lva1-rpt06.corp.linkedin.com" };
+    final Props props = new Props();
+    final String[] urls =
+        {"http://lva1-rpt07.corp.linkedin.com",
+            "http://lva1-rpt06.corp.linkedin.com"};
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
         urls[0]);
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".2.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".2.url",
         urls[1]);
-    List<HttpRequestBase> result =
+    final List<HttpRequestBase> result =
         JobCallbackUtil.parseJobCallbackProperties(props,
             JobCallbackStatusEnum.STARTED, contextInfo, 3);
 
@@ -288,17 +286,17 @@ public class JobCallbackUtilTest {
 
   @Test
   public void parseJobCallbackWithGapTest() {
-    Props props = new Props();
-    String[] urls =
-        { "http://lva1-rpt07.corp.linkedin.com",
-            "http://lva1-rpt06.corp.linkedin.com" };
+    final Props props = new Props();
+    final String[] urls =
+        {"http://lva1-rpt07.corp.linkedin.com",
+            "http://lva1-rpt06.corp.linkedin.com"};
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url",
         urls[0]);
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".3.url",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".3.url",
         urls[1]);
-    List<HttpRequestBase> result =
+    final List<HttpRequestBase> result =
         JobCallbackUtil.parseJobCallbackProperties(props,
             JobCallbackStatusEnum.STARTED, contextInfo, 3);
 
@@ -309,26 +307,26 @@ public class JobCallbackUtilTest {
 
   @Test
   public void parseJobCallbackWithPostTest() {
-    Props props = new Props();
-    String url = "http://lva1-rpt07.corp.linkedin.com";
-    String bodyText = "{name:\"you\"}";
+    final Props props = new Props();
+    final String url = "http://lva1-rpt07.corp.linkedin.com";
+    final String bodyText = "{name:\"you\"}";
     props.put("job.notification."
         + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.url", url);
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.method",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.method",
         HTTP_POST);
 
     props.put("job.notification."
-        + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.body",
+            + JobCallbackStatusEnum.STARTED.name().toLowerCase() + ".1.body",
         bodyText);
 
-    List<HttpRequestBase> result =
+    final List<HttpRequestBase> result =
         JobCallbackUtil.parseJobCallbackProperties(props,
             JobCallbackStatusEnum.STARTED, contextInfo, 3);
 
     Assert.assertEquals(1, result.size());
 
-    HttpPost httpPost = (HttpPost) result.get(0);
+    final HttpPost httpPost = (HttpPost) result.get(0);
 
     Assert.assertEquals(url, httpPost.getURI().toString());
     Assert.assertEquals(HTTP_POST, httpPost.getMethod());
@@ -340,7 +338,7 @@ public class JobCallbackUtilTest {
 
   @Test
   public void noHeaderElementTest() {
-    Header[] headerArr =
+    final Header[] headerArr =
         JobCallbackUtil.parseHttpHeaders("this is an amazing day");
 
     Assert.assertNotNull(headerArr);
@@ -349,9 +347,9 @@ public class JobCallbackUtilTest {
 
   @Test
   public void oneHeaderElementTest() {
-    String name = "Content-type";
-    String value = "application/json";
-    String headers =
+    final String name = "Content-type";
+    final String value = "application/json";
+    final String headers =
         name + JobCallbackConstants.HEADER_NAME_VALUE_DELIMITER + value;
     Header[] headerArr = JobCallbackUtil.parseHttpHeaders(headers);
 
@@ -360,7 +358,7 @@ public class JobCallbackUtilTest {
     Assert.assertEquals(name, headerArr[0].getName());
     Assert.assertEquals(value, headerArr[0].getValue());
 
-    String headersWithExtraDelimiter =
+    final String headersWithExtraDelimiter =
         name + JobCallbackConstants.HEADER_NAME_VALUE_DELIMITER + value
             + JobCallbackConstants.HEADER_ELEMENT_DELIMITER;
 
@@ -374,14 +372,14 @@ public class JobCallbackUtilTest {
 
   @Test
   public void multipleHeaderElementTest() {
-    String name1 = "Content-type";
-    String value1 = "application/json";
+    final String name1 = "Content-type";
+    final String value1 = "application/json";
 
-    String name2 = "Accept";
-    String value2 = "application/xml";
+    final String name2 = "Accept";
+    final String value2 = "application/xml";
 
-    String name3 = "User-Agent";
-    String value3 =
+    final String name3 = "User-Agent";
+    final String value3 =
         "Mozilla/5.0 (X11; Linux x86_64; rv:12.0) Gecko/20100101 Firefox/21.0";
 
     String headers = makeHeaderElement(name1, value1);
@@ -391,7 +389,7 @@ public class JobCallbackUtilTest {
     headers += makeHeaderElement(name3, value3);
 
     System.out.println("headers: " + headers);
-    Header[] headerArr = JobCallbackUtil.parseHttpHeaders(headers);
+    final Header[] headerArr = JobCallbackUtil.parseHttpHeaders(headers);
 
     Assert.assertNotNull(headerArr);
     Assert.assertEquals(3, headerArr.length);
@@ -405,14 +403,14 @@ public class JobCallbackUtilTest {
 
   @Test
   public void partialHeaderElementTest() {
-    String name1 = "Content-type";
-    String value1 = "application/json";
+    final String name1 = "Content-type";
+    final String value1 = "application/json";
 
-    String name2 = "Accept";
-    String value2 = "";
+    final String name2 = "Accept";
+    final String value2 = "";
 
-    String name3 = "User-Agent";
-    String value3 =
+    final String name3 = "User-Agent";
+    final String value3 =
         "Mozilla/5.0 (X11; Linux x86_64; rv:12.0) Gecko/20100101 Firefox/21.0";
 
     String headers = makeHeaderElement(name1, value1);
@@ -422,7 +420,7 @@ public class JobCallbackUtilTest {
     headers += makeHeaderElement(name3, value3);
 
     System.out.println("headers: " + headers);
-    Header[] headerArr = JobCallbackUtil.parseHttpHeaders(headers);
+    final Header[] headerArr = JobCallbackUtil.parseHttpHeaders(headers);
 
     Assert.assertNotNull(headerArr);
     Assert.assertEquals(3, headerArr.length);
@@ -434,7 +432,7 @@ public class JobCallbackUtilTest {
     Assert.assertEquals(value3, headerArr[2].getValue());
   }
 
-  private String makeHeaderElement(String name, String value) {
+  private String makeHeaderElement(final String name, final String value) {
     return name + JobCallbackConstants.HEADER_NAME_VALUE_DELIMITER + value;
   }
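The tokenWithEncoding test above shows that replaceTokens() URL-encodes substituted values such as a job name containing spaces. Below is a hedged sketch of that substitute-and-encode idea using plain String.replace and URLEncoder; the "${job}"/"${flow}" token syntax is invented for this sketch (the real token strings live in JobCallbackConstants) and the loop is a simplification, not JobCallbackUtil's implementation.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;

public class TokenReplaceSketch {

  public static void main(final String[] args) throws UnsupportedEncodingException {
    // Invented token syntax; the actual CONTEXT_*_TOKEN values are not shown in this diff.
    final Map<String, String> contextInfo = new HashMap<>();
    contextInfo.put("${job}", "my job");
    contextInfo.put("${flow}", "flowx");

    String url = "http://www.linkedin.com?job=${job}&flow=${flow}";
    for (final Map.Entry<String, String> token : contextInfo.entrySet()) {
      // Encode the value before substitution, as the tokenWithEncoding test expects
      // for a job name containing spaces.
      final String encoded = URLEncoder.encode(token.getValue(), "UTF-8");
      url = url.replace(token.getKey(), encoded);
    }

    // Prints http://www.linkedin.com?job=my+job&flow=flowx
    System.out.println(url);
  }
}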
 
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/event/LocalFlowWatcherTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/event/LocalFlowWatcherTest.java
index 0570435..b93e9aa 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/event/LocalFlowWatcherTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/event/LocalFlowWatcherTest.java
@@ -16,18 +16,6 @@
 
 package azkaban.execapp.event;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.apache.commons.io.FileUtils;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.execapp.EventCollectorListener;
 import azkaban.execapp.FlowRunner;
 import azkaban.executor.ExecutableFlow;
@@ -39,13 +27,23 @@ import azkaban.executor.MockExecutorLoader;
 import azkaban.executor.Status;
 import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.JSONUtils;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class LocalFlowWatcherTest {
+
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
@@ -53,10 +51,10 @@ public class LocalFlowWatcherTest {
 
   @Before
   public void setUp() throws Exception {
-    jobtypeManager =
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
-    jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
+    this.jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
   }
 
   @After
@@ -65,34 +63,35 @@ public class LocalFlowWatcherTest {
 
   public File setupDirectory() throws IOException {
     System.out.println("Create temp dir");
-    File workingDir = new File("_AzkabanTestDir_" + dirVal);
+    final File workingDir = new File("_AzkabanTestDir_" + this.dirVal);
     if (workingDir.exists()) {
       FileUtils.deleteDirectory(workingDir);
     }
     workingDir.mkdirs();
-    dirVal++;
+    this.dirVal++;
 
     return workingDir;
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicLocalFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
-    LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
-    FlowRunner runner2 =
+    final File workingDir2 = setupDirectory();
+    final LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
             watcher, 2);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     runner1Thread.start();
     runner2Thread.start();
@@ -104,24 +103,25 @@ public class LocalFlowWatcherTest {
     testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testLevel1LocalFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
-    LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
-    FlowRunner runner2 =
+    final File workingDir2 = setupDirectory();
+    final LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
             watcher, 1);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     runner1Thread.start();
     runner2Thread.start();
@@ -133,24 +133,25 @@ public class LocalFlowWatcherTest {
     testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testLevel2DiffLocalFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
-    LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
-    FlowRunner runner2 =
+    final File workingDir2 = setupDirectory();
+    final LocalFlowWatcher watcher = new LocalFlowWatcher(runner1);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2,
             watcher, 1);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     runner1Thread.start();
     runner2Thread.start();
@@ -162,12 +163,12 @@ public class LocalFlowWatcherTest {
     testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
-    for (ExecutableNode node : second.getExecutableNodes()) {
+  private void testPipelineLevel1(final ExecutableFlow first, final ExecutableFlow second) {
+    for (final ExecutableNode node : second.getExecutableNodes()) {
       Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
 
       // check it's start time is after the first's children.
-      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+      final ExecutableNode watchedNode = first.getExecutableNode(node.getId());
       if (watchedNode == null) {
         continue;
       }
@@ -183,38 +184,38 @@ public class LocalFlowWatcherTest {
       long minParentDiff = 0;
       if (node.getInNodes().size() > 0) {
         minParentDiff = Long.MAX_VALUE;
-        for (String dependency : node.getInNodes()) {
-          ExecutableNode parent = second.getExecutableNode(dependency);
-          long diff = node.getStartTime() - parent.getEndTime();
+        for (final String dependency : node.getInNodes()) {
+          final ExecutableNode parent = second.getExecutableNode(dependency);
+          final long diff = node.getStartTime() - parent.getEndTime();
           minParentDiff = Math.min(minParentDiff, diff);
         }
       }
-      long diff = node.getStartTime() - watchedNode.getEndTime();
+      final long diff = node.getStartTime() - watchedNode.getEndTime();
       System.out.println("   minPipelineTimeDiff:" + diff
           + " minDependencyTimeDiff:" + minParentDiff);
       Assert.assertTrue(minParentDiff < 100 || diff < 100);
     }
   }
 
-  private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
-    for (ExecutableNode node : second.getExecutableNodes()) {
+  private void testPipelineLevel2(final ExecutableFlow first, final ExecutableFlow second) {
+    for (final ExecutableNode node : second.getExecutableNodes()) {
       Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
 
       // check it's start time is after the first's children.
-      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+      final ExecutableNode watchedNode = first.getExecutableNode(node.getId());
       if (watchedNode == null) {
         continue;
       }
       Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
 
       long minDiff = Long.MAX_VALUE;
-      for (String watchedChild : watchedNode.getOutNodes()) {
-        ExecutableNode child = first.getExecutableNode(watchedChild);
+      for (final String watchedChild : watchedNode.getOutNodes()) {
+        final ExecutableNode child = first.getExecutableNode(watchedChild);
         if (child == null) {
           continue;
         }
         Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
-        long diff = node.getStartTime() - child.getEndTime();
+        final long diff = node.getStartTime() - child.getEndTime();
         minDiff = Math.min(minDiff, diff);
         System.out.println("Node " + node.getId() + " start: "
             + node.getStartTime() + " dependent on " + watchedChild + " "
@@ -224,9 +225,9 @@ public class LocalFlowWatcherTest {
       }
 
       long minParentDiff = Long.MAX_VALUE;
-      for (String dependency : node.getInNodes()) {
-        ExecutableNode parent = second.getExecutableNode(dependency);
-        long diff = node.getStartTime() - parent.getEndTime();
+      for (final String dependency : node.getInNodes()) {
+        final ExecutableNode parent = second.getExecutableNode(dependency);
+        final long diff = node.getStartTime() - parent.getEndTime();
         minParentDiff = Math.min(minParentDiff, diff);
       }
       System.out.println("   minPipelineTimeDiff:" + minDiff
@@ -235,19 +236,21 @@ public class LocalFlowWatcherTest {
     }
   }
 
-  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName, int execId,
-      FlowWatcher watcher, Integer pipeline) throws Exception {
-    return createFlowRunner(workingDir, loader, eventCollector, flowName, execId, watcher, pipeline, new Props());
+  private FlowRunner createFlowRunner(final File workingDir, final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName, final int execId,
+      final FlowWatcher watcher, final Integer pipeline) throws Exception {
+    return createFlowRunner(workingDir, loader, eventCollector, flowName, execId, watcher, pipeline,
+        new Props());
   }
 
-  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName, int execId,
-      FlowWatcher watcher, Integer pipeline, Props azkabanProps) throws Exception {
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow exFlow =
+  private FlowRunner createFlowRunner(final File workingDir, final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName, final int execId,
+      final FlowWatcher watcher, final Integer pipeline, final Props azkabanProps)
+      throws Exception {
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow exFlow =
         prepareExecDir(workingDir, testDir, flowName, execId);
-    ExecutionOptions option = exFlow.getExecutionOptions();
+    final ExecutionOptions option = exFlow.getExecutionOptions();
     if (watcher != null) {
       option.setPipelineLevel(pipeline);
       option.setPipelineExecutionId(watcher.getExecId());
@@ -256,26 +259,25 @@ public class LocalFlowWatcherTest {
     // File(exFlow.getExecutionPath()));
 
     loader.uploadExecutableFlow(exFlow);
-    FlowRunner runner =
-        new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(exFlow, loader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
     runner.setFlowWatcher(watcher);
     runner.addListener(eventCollector);
 
     return runner;
   }
 
-  private ExecutableFlow prepareExecDir(File workingDir, File execDir,
-      String flowName, int execId) throws IOException {
+  private ExecutableFlow prepareExecDir(final File workingDir, final File execDir,
+      final String flowName, final int execId) throws IOException {
     FileUtils.copyDirectory(execDir, workingDir);
 
-    File jsonFlowFile = new File(workingDir, flowName + ".flow");
-    @SuppressWarnings("unchecked")
-    HashMap<String, Object> flowObj =
+    final File jsonFlowFile = new File(workingDir, flowName + ".flow");
+    final HashMap<String, Object> flowObj =
         (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
 
-    Project project = new Project(1, "test");
-    Flow flow = Flow.flowFromObject(flowObj);
-    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+    final Project project = new Project(1, "test");
+    final Flow flow = Flow.flowFromObject(flowObj);
+    final ExecutableFlow execFlow = new ExecutableFlow(project, flow);
     execFlow.setExecutionId(execId);
     execFlow.setExecutionPath(workingDir.getPath());
     return execFlow;
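
An aside for readers skimming the patch: the LocalFlowWatcherTest hunks above apply the same two conventions throughout: locals and parameters become final, and instance fields are accessed through an explicit this. qualifier. A minimal sketch of the resulting style (hypothetical class and names, not code from the repository):

// Illustrative sketch only: final locals/parameters and this.-qualified field access.
import java.util.ArrayList;
import java.util.List;

public class StyleSketch {

  private final List<String> names = new ArrayList<>();

  public void add(final String rawName) {     // parameters declared final
    final String trimmed = rawName.trim();    // locals declared final
    this.names.add(trimmed);                  // fields read/written via this.
  }
}
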
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/event/RemoteFlowWatcherTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/event/RemoteFlowWatcherTest.java
index 2b7197c..c549120 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/event/RemoteFlowWatcherTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/event/RemoteFlowWatcherTest.java
@@ -16,18 +16,6 @@
 
 package azkaban.execapp.event;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.apache.commons.io.FileUtils;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.execapp.EventCollectorListener;
 import azkaban.execapp.FlowRunner;
 import azkaban.executor.ExecutableFlow;
@@ -40,13 +28,23 @@ import azkaban.executor.MockExecutorLoader;
 import azkaban.executor.Status;
 import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.JSONUtils;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class RemoteFlowWatcherTest {
+
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
@@ -54,10 +52,10 @@ public class RemoteFlowWatcherTest {
 
   @Before
   public void setUp() throws Exception {
-    jobtypeManager =
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
-    jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
+    this.jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
   }
 
   @After
@@ -66,34 +64,35 @@ public class RemoteFlowWatcherTest {
 
   public File setupDirectory() throws IOException {
     System.out.println("Create temp dir");
-    File workingDir = new File("_AzkabanTestDir_" + dirVal);
+    final File workingDir = new File("_AzkabanTestDir_" + this.dirVal);
     if (workingDir.exists()) {
       FileUtils.deleteDirectory(workingDir);
     }
     workingDir.mkdirs();
-    dirVal++;
+    this.dirVal++;
 
     return workingDir;
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicRemoteFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
-    RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
-    FlowRunner runner2 =
+    final File workingDir2 = setupDirectory();
+    final RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
             watcher, 2);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     printCurrentState("runner1 ", runner1.getExecutableFlow());
     runner1Thread.start();
@@ -107,24 +106,25 @@ public class RemoteFlowWatcherTest {
     testPipelineLevel2(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testLevel1RemoteFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
-    RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
-    FlowRunner runner2 =
+    final File workingDir2 = setupDirectory();
+    final RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1", 2,
             watcher, 1);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     runner1Thread.start();
     runner2Thread.start();
@@ -136,25 +136,26 @@ public class RemoteFlowWatcherTest {
     testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testLevel2DiffRemoteFlowWatcher() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
 
-    File workingDir1 = setupDirectory();
-    FlowRunner runner1 =
+    final File workingDir1 = setupDirectory();
+    final FlowRunner runner1 =
         createFlowRunner(workingDir1, loader, eventCollector, "exec1", 1, null,
             null);
-    Thread runner1Thread = new Thread(runner1);
+    final Thread runner1Thread = new Thread(runner1);
 
-    File workingDir2 = setupDirectory();
+    final File workingDir2 = setupDirectory();
 
-    RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
-    FlowRunner runner2 =
+    final RemoteFlowWatcher watcher = new RemoteFlowWatcher(1, loader, 100);
+    final FlowRunner runner2 =
         createFlowRunner(workingDir2, loader, eventCollector, "exec1-mod", 2,
             watcher, 1);
-    Thread runner2Thread = new Thread(runner2);
+    final Thread runner2Thread = new Thread(runner2);
 
     runner1Thread.start();
     runner2Thread.start();
@@ -166,12 +167,12 @@ public class RemoteFlowWatcherTest {
     testPipelineLevel1(runner1.getExecutableFlow(), runner2.getExecutableFlow());
   }
 
-  private void testPipelineLevel1(ExecutableFlow first, ExecutableFlow second) {
-    for (ExecutableNode node : second.getExecutableNodes()) {
+  private void testPipelineLevel1(final ExecutableFlow first, final ExecutableFlow second) {
+    for (final ExecutableNode node : second.getExecutableNodes()) {
       Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
 
       // check its start time is after the first flow's children.
-      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+      final ExecutableNode watchedNode = first.getExecutableNode(node.getId());
       if (watchedNode == null) {
         continue;
       }
@@ -187,36 +188,36 @@ public class RemoteFlowWatcherTest {
       long minParentDiff = 0;
       if (node.getInNodes().size() > 0) {
         minParentDiff = Long.MAX_VALUE;
-        for (String dependency : node.getInNodes()) {
-          ExecutableNode parent = second.getExecutableNode(dependency);
-          long diff = node.getStartTime() - parent.getEndTime();
+        for (final String dependency : node.getInNodes()) {
+          final ExecutableNode parent = second.getExecutableNode(dependency);
+          final long diff = node.getStartTime() - parent.getEndTime();
           minParentDiff = Math.min(minParentDiff, diff);
         }
       }
-      long diff = node.getStartTime() - watchedNode.getEndTime();
+      final long diff = node.getStartTime() - watchedNode.getEndTime();
       Assert.assertTrue(minParentDiff < 500 || diff < 500);
     }
   }
 
-  private void testPipelineLevel2(ExecutableFlow first, ExecutableFlow second) {
-    for (ExecutableNode node : second.getExecutableNodes()) {
+  private void testPipelineLevel2(final ExecutableFlow first, final ExecutableFlow second) {
+    for (final ExecutableNode node : second.getExecutableNodes()) {
       Assert.assertEquals(node.getStatus(), Status.SUCCEEDED);
 
       // check its start time is after the first flow's children.
-      ExecutableNode watchedNode = first.getExecutableNode(node.getId());
+      final ExecutableNode watchedNode = first.getExecutableNode(node.getId());
       if (watchedNode == null) {
         continue;
       }
       Assert.assertEquals(watchedNode.getStatus(), Status.SUCCEEDED);
 
       long minDiff = Long.MAX_VALUE;
-      for (String watchedChild : watchedNode.getOutNodes()) {
-        ExecutableNode child = first.getExecutableNode(watchedChild);
+      for (final String watchedChild : watchedNode.getOutNodes()) {
+        final ExecutableNode child = first.getExecutableNode(watchedChild);
         if (child == null) {
           continue;
         }
         Assert.assertEquals(child.getStatus(), Status.SUCCEEDED);
-        long diff = node.getStartTime() - child.getEndTime();
+        final long diff = node.getStartTime() - child.getEndTime();
         minDiff = Math.min(minDiff, diff);
         System.out.println("Node " + node.getId() + " start: "
             + node.getStartTime() + " dependent on " + watchedChild + " "
@@ -225,9 +226,9 @@ public class RemoteFlowWatcherTest {
       }
 
       long minParentDiff = Long.MAX_VALUE;
-      for (String dependency : node.getInNodes()) {
-        ExecutableNode parent = second.getExecutableNode(dependency);
-        long diff = node.getStartTime() - parent.getEndTime();
+      for (final String dependency : node.getInNodes()) {
+        final ExecutableNode parent = second.getExecutableNode(dependency);
+        final long diff = node.getStartTime() - parent.getEndTime();
         minParentDiff = Math.min(minParentDiff, diff);
       }
       System.out.println("   minPipelineTimeDiff:" + minDiff
@@ -236,19 +237,21 @@ public class RemoteFlowWatcherTest {
     }
   }
 
-  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName, int execId,
-      FlowWatcher watcher, Integer pipeline) throws Exception {
-    return createFlowRunner(workingDir, loader, eventCollector, flowName, execId, watcher, pipeline, new Props());
+  private FlowRunner createFlowRunner(final File workingDir, final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName, final int execId,
+      final FlowWatcher watcher, final Integer pipeline) throws Exception {
+    return createFlowRunner(workingDir, loader, eventCollector, flowName, execId, watcher, pipeline,
+        new Props());
   }
 
-  private FlowRunner createFlowRunner(File workingDir, ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName, int execId,
-      FlowWatcher watcher, Integer pipeline, Props azkabanProps) throws Exception {
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow exFlow =
+  private FlowRunner createFlowRunner(final File workingDir, final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName, final int execId,
+      final FlowWatcher watcher, final Integer pipeline, final Props azkabanProps)
+      throws Exception {
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow exFlow =
         prepareExecDir(workingDir, testDir, flowName, execId);
-    ExecutionOptions options = exFlow.getExecutionOptions();
+    final ExecutionOptions options = exFlow.getExecutionOptions();
     if (watcher != null) {
       options.setPipelineLevel(pipeline);
       options.setPipelineExecutionId(watcher.getExecId());
@@ -257,16 +260,16 @@ public class RemoteFlowWatcherTest {
     // File(exFlow.getExecutionPath()));
 
     loader.uploadExecutableFlow(exFlow);
-    FlowRunner runner =
-        new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(exFlow, loader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
     runner.setFlowWatcher(watcher);
     runner.addListener(eventCollector);
 
     return runner;
   }
 
-  private void printCurrentState(String prefix, ExecutableFlowBase flow) {
-    for (ExecutableNode node : flow.getExecutableNodes()) {
+  private void printCurrentState(final String prefix, final ExecutableFlowBase flow) {
+    for (final ExecutableNode node : flow.getExecutableNodes()) {
 
       System.err.println(prefix + node.getNestedId() + "->"
           + node.getStatus().name());
@@ -276,18 +279,17 @@ public class RemoteFlowWatcherTest {
     }
   }
 
-  private ExecutableFlow prepareExecDir(File workingDir, File execDir,
-      String flowName, int execId) throws IOException {
+  private ExecutableFlow prepareExecDir(final File workingDir, final File execDir,
+      final String flowName, final int execId) throws IOException {
     FileUtils.copyDirectory(execDir, workingDir);
 
-    File jsonFlowFile = new File(workingDir, flowName + ".flow");
-    @SuppressWarnings("unchecked")
-    HashMap<String, Object> flowObj =
+    final File jsonFlowFile = new File(workingDir, flowName + ".flow");
+    final HashMap<String, Object> flowObj =
         (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
 
-    Project project = new Project(1, "test");
-    Flow flow = Flow.flowFromObject(flowObj);
-    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+    final Project project = new Project(1, "test");
+    final Flow flow = Flow.flowFromObject(flowObj);
+    final ExecutableFlow execFlow = new ExecutableFlow(project, flow);
     execFlow.setExecutionId(execId);
     execFlow.setExecutionPath(workingDir.getPath());
     return execFlow;
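
RemoteFlowWatcherTest also shows the import and annotation layout the reformat enforces: the import section collapses into a single alphabetically sorted block (azkaban.*, then java.*, then org.*) with no blank-line grouping, and stacked annotations such as @Ignore @Test are split onto separate lines. A hypothetical test skeleton in that layout (sketch only, not repository code):

// Illustrative sketch only: one sorted import block, one annotation per line.
import java.util.ArrayList;
import java.util.List;
import org.junit.Ignore;
import org.junit.Test;

public class LayoutSketch {

  @Ignore
  @Test
  public void pendingCase() throws Exception {
    final List<Integer> values = new ArrayList<>();
    values.add(1);
  }
}
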
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/EventCollectorListener.java b/azkaban-exec-server/src/test/java/azkaban/execapp/EventCollectorListener.java
index 7a474e2..34bfc2b 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/EventCollectorListener.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/EventCollectorListener.java
@@ -16,43 +16,43 @@
 
 package azkaban.execapp;
 
+import azkaban.event.Event;
+import azkaban.event.Event.Type;
+import azkaban.event.EventListener;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 
-import azkaban.event.EventListener;
-import azkaban.event.Event;
-import azkaban.event.Event.Type;
-
 public class EventCollectorListener implements EventListener {
-  private ArrayList<Event> eventList = new ArrayList<Event>();
-  private HashSet<Event.Type> filterOutTypes = new HashSet<Event.Type>();
 
-  public void setEventFilterOut(Event.Type... types) {
-    filterOutTypes.addAll(Arrays.asList(types));
+  private final ArrayList<Event> eventList = new ArrayList<>();
+  private final HashSet<Event.Type> filterOutTypes = new HashSet<>();
+
+  public void setEventFilterOut(final Event.Type... types) {
+    this.filterOutTypes.addAll(Arrays.asList(types));
   }
 
   @Override
-  public void handleEvent(Event event) {
-    if (!filterOutTypes.contains(event.getType())) {
-      eventList.add(event);
+  public void handleEvent(final Event event) {
+    if (!this.filterOutTypes.contains(event.getType())) {
+      this.eventList.add(event);
     }
   }
 
   public ArrayList<Event> getEventList() {
-    return eventList;
+    return this.eventList;
   }
 
   public void writeAllEvents() {
-    for (Event event : eventList) {
+    for (final Event event : this.eventList) {
       System.out.print(event.getType());
       System.out.print(",");
     }
   }
 
   public boolean checkOrdering() {
-    long time = 0;
-    for (Event event : eventList) {
+    final long time = 0;
+    for (final Event event : this.eventList) {
       if (time > event.getTime()) {
         return false;
       }
@@ -61,9 +61,9 @@ public class EventCollectorListener implements EventListener {
     return true;
   }
 
-  public void checkEventExists(Type[] types) {
+  public void checkEventExists(final Type[] types) {
     int index = 0;
-    for (Event event : eventList) {
+    for (final Event event : this.eventList) {
       if (event.getRunner() == null) {
         continue;
       }
@@ -72,7 +72,7 @@ public class EventCollectorListener implements EventListener {
         throw new RuntimeException("More events than expected. Got "
             + event.getType());
       }
-      Type type = types[index++];
+      final Type type = types[index++];
 
       if (type != event.getType()) {
         throw new RuntimeException("Got " + event.getType() + ", expected "
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowPreparerTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowPreparerTest.java
index 07797d6..84cd4e1 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowPreparerTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowPreparerTest.java
@@ -17,6 +17,10 @@
 
 package azkaban.execapp;
 
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
 import azkaban.executor.ExecutableFlow;
 import azkaban.project.ProjectFileHandler;
 import azkaban.storage.StorageManager;
@@ -29,11 +33,9 @@ import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import static org.junit.Assert.*;
-import static org.mockito.Mockito.*;
-
 
 public class FlowPreparerTest {
+
   public static final String SAMPLE_FLOW_01 = "sample_flow_01";
 
   final File executionsDir = new File("executions");
@@ -46,32 +48,34 @@ public class FlowPreparerTest {
   public void setUp() throws Exception {
     tearDown();
 
-    executionsDir.mkdirs();
-    projectsDir.mkdirs();
+    this.executionsDir.mkdirs();
+    this.projectsDir.mkdirs();
 
-    ClassLoader classLoader = getClass().getClassLoader();
-    File file = new File(classLoader.getResource(SAMPLE_FLOW_01 + ".zip").getFile());
+    final ClassLoader classLoader = getClass().getClassLoader();
+    final File file = new File(classLoader.getResource(SAMPLE_FLOW_01 + ".zip").getFile());
 
-    ProjectFileHandler projectFileHandler = mock(ProjectFileHandler.class);
+    final ProjectFileHandler projectFileHandler = mock(ProjectFileHandler.class);
     when(projectFileHandler.getFileType()).thenReturn("zip");
     when(projectFileHandler.getLocalFile()).thenReturn(file);
 
-    StorageManager storageManager = mock(StorageManager.class);
+    final StorageManager storageManager = mock(StorageManager.class);
     when(storageManager.getProjectFile(12, 34)).thenReturn(projectFileHandler);
 
-    instance = new FlowPreparer(storageManager, executionsDir, projectsDir, installedProjects);
+    this.instance = new FlowPreparer(storageManager, this.executionsDir, this.projectsDir,
+        this.installedProjects);
   }
 
   @After
   public void tearDown() throws Exception {
-    FileUtils.deleteDirectory(executionsDir);
-    FileUtils.deleteDirectory(projectsDir);
+    FileUtils.deleteDirectory(this.executionsDir);
+    FileUtils.deleteDirectory(this.projectsDir);
   }
 
   @Test
   public void testSetupProject() throws Exception {
-    ProjectVersion pv = new ProjectVersion(12, 34, new File(projectsDir, "sample_project_01"));
-    instance.setupProject(pv);
+    final ProjectVersion pv = new ProjectVersion(12, 34,
+        new File(this.projectsDir, "sample_project_01"));
+    this.instance.setupProject(pv);
 
     assertTrue(pv.getInstalledDir().exists());
     assertTrue(new File(pv.getInstalledDir(), "sample_flow_01").exists());
@@ -79,13 +83,13 @@ public class FlowPreparerTest {
 
   @Test
   public void testSetupFlow() throws Exception {
-    ExecutableFlow executableFlow = mock(ExecutableFlow.class);
+    final ExecutableFlow executableFlow = mock(ExecutableFlow.class);
     when(executableFlow.getExecutionId()).thenReturn(12345);
     when(executableFlow.getProjectId()).thenReturn(12);
     when(executableFlow.getVersion()).thenReturn(34);
 
-    instance.setup(executableFlow);
-    File execDir = new File(executionsDir, "12345");
+    this.instance.setup(executableFlow);
+    final File execDir = new File(this.executionsDir, "12345");
     assertTrue(execDir.exists());
     assertTrue(new File(execDir, SAMPLE_FLOW_01).exists());
   }
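
FlowPreparerTest shows how wildcard static imports are handled: import static org.junit.Assert.* and import static org.mockito.Mockito.* are expanded to explicit static imports and moved into their own block above the regular imports. A hypothetical fragment in that form (sketch only; the mocked class is arbitrary):

// Illustrative sketch only: explicit static imports in a block above the regular imports.
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.File;

public class StaticImportSketch {

  public void check() {
    final File dir = mock(File.class);
    when(dir.exists()).thenReturn(true);
    assertTrue(dir.exists());
  }
}
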
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPipelineTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPipelineTest.java
index dbf3de1..84d67fc 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPipelineTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPipelineTest.java
@@ -16,19 +16,6 @@
 
 package azkaban.execapp;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-import org.junit.Assert;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.execapp.event.FlowWatcher;
 import azkaban.execapp.event.LocalFlowWatcher;
 import azkaban.executor.ExecutableFlow;
@@ -44,11 +31,22 @@ import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.jobtype.JobTypePluginSet;
 import azkaban.project.DirectoryFlowLoader;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
 import azkaban.project.ProjectManagerException;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 /**
  * Flows in this test:
@@ -68,17 +66,19 @@ import azkaban.utils.Props;
  *
  * jobd=innerFlow2
  * innerFlow2->innerJobA
+ *
  * @author rpark
  */
 public class FlowRunnerPipelineTest {
+
+  private static int id = 101;
+  private final Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
   private ExecutorLoader fakeExecutorLoader;
-  private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private Project project;
   private Map<String, Flow> flowMap;
-  private static int id = 101;
 
   public FlowRunnerPipelineTest() {
   }
@@ -86,23 +86,23 @@ public class FlowRunnerPipelineTest {
   @Before
   public void setUp() throws Exception {
     System.out.println("Create temp dir");
-    workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
-    if (workingDir.exists()) {
-      FileUtils.deleteDirectory(workingDir);
+    this.workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+    if (this.workingDir.exists()) {
+      FileUtils.deleteDirectory(this.workingDir);
     }
-    workingDir.mkdirs();
-    jobtypeManager =
+    this.workingDir.mkdirs();
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
-    JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
+    final JobTypePluginSet pluginSet = this.jobtypeManager.getJobTypePluginSet();
 
     pluginSet.addPluginClass("java", JavaJob.class);
     pluginSet.addPluginClass("test", InteractiveTestJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
-    fakeExecutorLoader = new MockExecutorLoader();
-    project = new Project(1, "testProject");
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
+    this.fakeExecutorLoader = new MockExecutorLoader();
+    this.project = new Project(1, "testProject");
 
-    File dir = new File("unit/executions/embedded2");
-    prepareProject(project, dir);
+    final File dir = new File("unit/executions/embedded2");
+    prepareProject(this.project, dir);
 
     InteractiveTestJob.clearTestJobs();
   }
@@ -110,47 +110,48 @@ public class FlowRunnerPipelineTest {
   @After
   public void tearDown() throws IOException {
     System.out.println("Teardown temp dir");
-    if (workingDir != null) {
-      FileUtils.deleteDirectory(workingDir);
-      workingDir = null;
+    if (this.workingDir != null) {
+      FileUtils.deleteDirectory(this.workingDir);
+      this.workingDir = null;
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicPipelineLevel1Run() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner previousRunner =
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner previousRunner =
         createFlowRunner(eventCollector, "jobf", "prev");
 
-    ExecutionOptions options = new ExecutionOptions();
+    final ExecutionOptions options = new ExecutionOptions();
     options.setPipelineExecutionId(previousRunner.getExecutableFlow()
         .getExecutionId());
     options.setPipelineLevel(1);
-    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
-    FlowRunner pipelineRunner =
+    final FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    final FlowRunner pipelineRunner =
         createFlowRunner(eventCollector, "jobf", "pipe", options);
     pipelineRunner.setFlowWatcher(watcher);
 
-    Map<String, Status> previousExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, Status> pipelineExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, ExecutableNode> previousNodeMap =
-        new HashMap<String, ExecutableNode>();
-    Map<String, ExecutableNode> pipelineNodeMap =
-        new HashMap<String, ExecutableNode>();
+    final Map<String, Status> previousExpectedStateMap =
+        new HashMap<>();
+    final Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
-    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    final ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    final ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
     createExpectedStateMap(previousFlow, previousExpectedStateMap,
         previousNodeMap);
     createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
         pipelineNodeMap);
 
-    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    final Thread thread1 = runFlowRunnerInThread(previousRunner);
     pause(250);
-    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    final Thread thread2 = runFlowRunnerInThread(pipelineRunner);
     pause(500);
 
     previousExpectedStateMap.put("joba", Status.RUNNING);
@@ -289,41 +290,42 @@ public class FlowRunnerPipelineTest {
     Assert.assertFalse(thread2.isAlive());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicPipelineLevel2Run() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner previousRunner =
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner previousRunner =
         createFlowRunner(eventCollector, "pipelineFlow", "prev");
 
-    ExecutionOptions options = new ExecutionOptions();
+    final ExecutionOptions options = new ExecutionOptions();
     options.setPipelineExecutionId(previousRunner.getExecutableFlow()
         .getExecutionId());
     options.setPipelineLevel(2);
-    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
-    FlowRunner pipelineRunner =
+    final FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    final FlowRunner pipelineRunner =
         createFlowRunner(eventCollector, "pipelineFlow", "pipe", options);
     pipelineRunner.setFlowWatcher(watcher);
 
-    Map<String, Status> previousExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, Status> pipelineExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, ExecutableNode> previousNodeMap =
-        new HashMap<String, ExecutableNode>();
-    Map<String, ExecutableNode> pipelineNodeMap =
-        new HashMap<String, ExecutableNode>();
+    final Map<String, Status> previousExpectedStateMap =
+        new HashMap<>();
+    final Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
-    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    final ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    final ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
     createExpectedStateMap(previousFlow, previousExpectedStateMap,
         previousNodeMap);
     createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
         pipelineNodeMap);
 
-    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    final Thread thread1 = runFlowRunnerInThread(previousRunner);
     pause(250);
-    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    final Thread thread2 = runFlowRunnerInThread(pipelineRunner);
     pause(250);
 
     previousExpectedStateMap.put("pipeline1", Status.RUNNING);
@@ -482,41 +484,42 @@ public class FlowRunnerPipelineTest {
     Assert.assertFalse(thread2.isAlive());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicPipelineLevel2Run2() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner previousRunner =
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner previousRunner =
         createFlowRunner(eventCollector, "pipeline1_2", "prev");
 
-    ExecutionOptions options = new ExecutionOptions();
+    final ExecutionOptions options = new ExecutionOptions();
     options.setPipelineExecutionId(previousRunner.getExecutableFlow()
         .getExecutionId());
     options.setPipelineLevel(2);
-    FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
-    FlowRunner pipelineRunner =
+    final FlowWatcher watcher = new LocalFlowWatcher(previousRunner);
+    final FlowRunner pipelineRunner =
         createFlowRunner(eventCollector, "pipeline1_2", "pipe", options);
     pipelineRunner.setFlowWatcher(watcher);
 
-    Map<String, Status> previousExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, Status> pipelineExpectedStateMap =
-        new HashMap<String, Status>();
-    Map<String, ExecutableNode> previousNodeMap =
-        new HashMap<String, ExecutableNode>();
-    Map<String, ExecutableNode> pipelineNodeMap =
-        new HashMap<String, ExecutableNode>();
+    final Map<String, Status> previousExpectedStateMap =
+        new HashMap<>();
+    final Map<String, Status> pipelineExpectedStateMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> previousNodeMap =
+        new HashMap<>();
+    final Map<String, ExecutableNode> pipelineNodeMap =
+        new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
-    ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
+    final ExecutableFlow pipelineFlow = pipelineRunner.getExecutableFlow();
+    final ExecutableFlow previousFlow = previousRunner.getExecutableFlow();
     createExpectedStateMap(previousFlow, previousExpectedStateMap,
         previousNodeMap);
     createExpectedStateMap(pipelineFlow, pipelineExpectedStateMap,
         pipelineNodeMap);
 
-    Thread thread1 = runFlowRunnerInThread(previousRunner);
+    final Thread thread1 = runFlowRunnerInThread(previousRunner);
     pause(250);
-    Thread thread2 = runFlowRunnerInThread(pipelineRunner);
+    final Thread thread2 = runFlowRunnerInThread(pipelineRunner);
     pause(250);
 
     previousExpectedStateMap.put("pipeline1_1", Status.RUNNING);
@@ -603,22 +606,22 @@ public class FlowRunnerPipelineTest {
     Assert.assertFalse(thread2.isAlive());
   }
 
-  private Thread runFlowRunnerInThread(FlowRunner runner) {
-    Thread thread = new Thread(runner);
+  private Thread runFlowRunnerInThread(final FlowRunner runner) {
+    final Thread thread = new Thread(runner);
     thread.start();
     return thread;
   }
 
-  private void pause(long millisec) {
+  private void pause(final long millisec) {
     try {
       Thread.sleep(millisec);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
     }
   }
 
-  private void createExpectedStateMap(ExecutableFlowBase flow,
-      Map<String, Status> expectedStateMap, Map<String, ExecutableNode> nodeMap) {
-    for (ExecutableNode node : flow.getExecutableNodes()) {
+  private void createExpectedStateMap(final ExecutableFlowBase flow,
+      final Map<String, Status> expectedStateMap, final Map<String, ExecutableNode> nodeMap) {
+    for (final ExecutableNode node : flow.getExecutableNodes()) {
       expectedStateMap.put(node.getNestedId(), node.getStatus());
       nodeMap.put(node.getNestedId(), node);
 
@@ -629,11 +632,11 @@ public class FlowRunnerPipelineTest {
     }
   }
 
-  private void compareStates(Map<String, Status> expectedStateMap,
-      Map<String, ExecutableNode> nodeMap) {
-    for (String printedId : expectedStateMap.keySet()) {
-      Status expectedStatus = expectedStateMap.get(printedId);
-      ExecutableNode node = nodeMap.get(printedId);
+  private void compareStates(final Map<String, Status> expectedStateMap,
+      final Map<String, ExecutableNode> nodeMap) {
+    for (final String printedId : expectedStateMap.keySet()) {
+      final Status expectedStatus = expectedStateMap.get(printedId);
+      final ExecutableNode node = nodeMap.get(printedId);
       if (node == null) {
         System.out.println("id node: " + printedId + " doesn't exist.");
       }
@@ -645,21 +648,22 @@ public class FlowRunnerPipelineTest {
     }
   }
 
-  private void prepareProject(Project project, File directory) throws ProjectManagerException,
+  private void prepareProject(final Project project, final File directory)
+      throws ProjectManagerException,
       IOException {
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), this.logger);
     loader.loadProjectFlow(project, directory);
     if (!loader.getErrors().isEmpty()) {
-      for (String error : loader.getErrors()) {
+      for (final String error : loader.getErrors()) {
         System.out.println(error);
       }
 
       throw new RuntimeException("Errors found in setup");
     }
 
-    flowMap = loader.getFlowMap();
-    project.setFlows(flowMap);
-    FileUtils.copyDirectory(directory, workingDir);
+    this.flowMap = loader.getFlowMap();
+    project.setFlows(this.flowMap);
+    FileUtils.copyDirectory(directory, this.workingDir);
   }
 
   // private void printCurrentState(String prefix, ExecutableFlowBase flow) {
@@ -672,37 +676,39 @@ public class FlowRunnerPipelineTest {
   //   }
   // }
   //
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName, String groupName) throws Exception {
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName, final String groupName) throws Exception {
     return createFlowRunner(eventCollector, flowName, groupName,
         new ExecutionOptions(), new Props());
   }
 
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName, String groupName, ExecutionOptions options) throws Exception {
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName, final String groupName, final ExecutionOptions options)
+      throws Exception {
     return createFlowRunner(eventCollector, flowName, groupName,
         options, new Props());
   }
 
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName, String groupName, ExecutionOptions options, Props azkabanProps)
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName, final String groupName, final ExecutionOptions options,
+      final Props azkabanProps)
       throws Exception {
-    Flow flow = flowMap.get(flowName);
+    final Flow flow = this.flowMap.get(flowName);
 
-    int exId = id++;
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
-    exFlow.setExecutionPath(workingDir.getPath());
+    final int exId = id++;
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
+    exFlow.setExecutionPath(this.workingDir.getPath());
     exFlow.setExecutionId(exId);
 
-    Map<String, String> flowParam = new HashMap<String, String>();
+    final Map<String, String> flowParam = new HashMap<>();
     flowParam.put("group", groupName);
     options.addAllFlowParameters(flowParam);
     exFlow.setExecutionOptions(options);
-    fakeExecutorLoader.uploadExecutableFlow(exFlow);
+    this.fakeExecutorLoader.uploadExecutableFlow(exFlow);
 
-    FlowRunner runner =
-        new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(exId),
-            fakeExecutorLoader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(this.fakeExecutorLoader.fetchExecutableFlow(exId),
+            this.fakeExecutorLoader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
 
     runner.addListener(eventCollector);
 
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPropertyResolutionTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPropertyResolutionTest.java
index f13a13c..49bf3be 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPropertyResolutionTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerPropertyResolutionTest.java
@@ -16,20 +16,6 @@
 
 package azkaban.execapp;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableFlowBase;
 import azkaban.executor.ExecutableNode;
@@ -40,11 +26,22 @@ import azkaban.executor.MockExecutorLoader;
 import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.project.DirectoryFlowLoader;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
 import azkaban.project.ProjectManagerException;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 /**
  * Test the property resolution of jobs in a flow.
@@ -68,34 +65,35 @@ import azkaban.utils.Props;
  * properties than other jobs.
  */
 public class FlowRunnerPropertyResolutionTest {
+
+  private static int id = 101;
+  private final Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
   private ExecutorLoader fakeExecutorLoader;
-  private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private Project project;
   private Map<String, Flow> flowMap;
-  private static int id = 101;
 
   @Before
   public void setUp() throws Exception {
     System.out.println("Create temp dir");
-    workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
-    if (workingDir.exists()) {
-      FileUtils.deleteDirectory(workingDir);
+    this.workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+    if (this.workingDir.exists()) {
+      FileUtils.deleteDirectory(this.workingDir);
     }
-    workingDir.mkdirs();
-    jobtypeManager =
+    this.workingDir.mkdirs();
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
-    jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
-    jobtypeManager.getJobTypePluginSet().addPluginClass("test",
+    this.jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+    this.jobtypeManager.getJobTypePluginSet().addPluginClass("test",
         InteractiveTestJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
-    fakeExecutorLoader = new MockExecutorLoader();
-    project = new Project(1, "testProject");
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
+    this.fakeExecutorLoader = new MockExecutorLoader();
+    this.project = new Project(1, "testProject");
 
-    File dir = new File("unit/executions/execpropstest");
-    prepareProject(project, dir);
+    final File dir = new File("unit/executions/execpropstest");
+    prepareProject(this.project, dir);
 
     InteractiveTestJob.clearTestJobs();
   }
@@ -103,25 +101,24 @@ public class FlowRunnerPropertyResolutionTest {
   @After
   public void tearDown() throws IOException {
     System.out.println("Teardown temp dir");
-    if (workingDir != null) {
-      FileUtils.deleteDirectory(workingDir);
-      workingDir = null;
+    if (this.workingDir != null) {
+      FileUtils.deleteDirectory(this.workingDir);
+      this.workingDir = null;
     }
   }
 
   /**
    * Tests the basic flow resolution. Flow is defined in execpropstest
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPropertyResolution() throws Exception {
-    HashMap<String, String> flowProps = new HashMap<String, String>();
+    final HashMap<String, String> flowProps = new HashMap<>();
     flowProps.put("props7", "flow7");
     flowProps.put("props6", "flow6");
     flowProps.put("props5", "flow5");
-    FlowRunner runner = createFlowRunner("job3", flowProps);
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final FlowRunner runner = createFlowRunner("job3", flowProps);
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
     createNodeMap(runner.getExecutableFlow(), nodeMap);
 
     // 1. Start flow. Job 2 should start
@@ -131,7 +128,7 @@ public class FlowRunnerPropertyResolutionTest {
     // Job 2 is a normal job.
     // Only the flow overrides and the shared properties matter
     ExecutableNode node = nodeMap.get("job2");
-    Props job2Props = node.getInputProps();
+    final Props job2Props = node.getInputProps();
     Assert.assertEquals("shared1", job2Props.get("props1"));
     Assert.assertEquals("job2", job2Props.get("props2"));
     Assert.assertEquals("moo3", job2Props.get("props3"));
@@ -144,14 +141,14 @@ public class FlowRunnerPropertyResolutionTest {
     // Job 1 is inside another flow, and is nested in a different directory
     // The priority order should be:
     // job1->innerflow->job2.output->flow.overrides->job1 shared props
-    Props job2Generated = new Props();
+    final Props job2Generated = new Props();
     job2Generated.put("props6", "gjob6");
     job2Generated.put("props9", "gjob9");
     job2Generated.put("props10", "gjob10");
     InteractiveTestJob.getTestJob("job2").succeedJob(job2Generated);
     pause(250);
     node = nodeMap.get("innerflow:job1");
-    Props job1Props = node.getInputProps();
+    final Props job1Props = node.getInputProps();
     Assert.assertEquals("job1", job1Props.get("props1"));
     Assert.assertEquals("job2", job1Props.get("props2"));
     Assert.assertEquals("job8", job1Props.get("props8"));
@@ -167,14 +164,14 @@ public class FlowRunnerPropertyResolutionTest {
     // The priority order should be:
     // job4->job1.output->innerflow->job2.output->flow.overrides->job4 shared
     // props
-    Props job1GeneratedProps = new Props();
+    final Props job1GeneratedProps = new Props();
     job1GeneratedProps.put("props9", "g2job9");
     job1GeneratedProps.put("props7", "g2job7");
     InteractiveTestJob.getTestJob("innerflow:job1").succeedJob(
         job1GeneratedProps);
     pause(250);
     node = nodeMap.get("innerflow:job4");
-    Props job4Props = node.getInputProps();
+    final Props job4Props = node.getInputProps();
     Assert.assertEquals("job8", job4Props.get("props8"));
     Assert.assertEquals("job9", job4Props.get("props9"));
     Assert.assertEquals("g2job7", job4Props.get("props7"));
@@ -189,14 +186,14 @@ public class FlowRunnerPropertyResolutionTest {
     // Job 3 is a normal job taking props from an embedded flow
     // The priority order should be:
     // job3->innerflow.output->flow.overrides->job3.sharedprops
-    Props job4GeneratedProps = new Props();
+    final Props job4GeneratedProps = new Props();
     job4GeneratedProps.put("props9", "g4job9");
     job4GeneratedProps.put("props6", "g4job6");
     InteractiveTestJob.getTestJob("innerflow:job4").succeedJob(
         job4GeneratedProps);
     pause(250);
     node = nodeMap.get("job3");
-    Props job3Props = node.getInputProps();
+    final Props job3Props = node.getInputProps();
     Assert.assertEquals("job3", job3Props.get("props3"));
     Assert.assertEquals("g4job6", job3Props.get("props6"));
     Assert.assertEquals("g4job9", job3Props.get("props9"));
@@ -207,49 +204,50 @@ public class FlowRunnerPropertyResolutionTest {
     Assert.assertEquals("moo4", job3Props.get("props4"));
   }
 
-  private void prepareProject(Project project, File directory) throws ProjectManagerException,
+  private void prepareProject(final Project project, final File directory)
+      throws ProjectManagerException,
       IOException {
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), this.logger);
     loader.loadProjectFlow(project, directory);
     if (!loader.getErrors().isEmpty()) {
-      for (String error : loader.getErrors()) {
+      for (final String error : loader.getErrors()) {
         System.out.println(error);
       }
 
       throw new RuntimeException("Errors found in setup");
     }
 
-    flowMap = loader.getFlowMap();
-    project.setFlows(flowMap);
-    FileUtils.copyDirectory(directory, workingDir);
+    this.flowMap = loader.getFlowMap();
+    project.setFlows(this.flowMap);
+    FileUtils.copyDirectory(directory, this.workingDir);
   }
 
-  private FlowRunner createFlowRunner(String flowName,
-      HashMap<String, String> flowParams) throws Exception {
+  private FlowRunner createFlowRunner(final String flowName,
+      final HashMap<String, String> flowParams) throws Exception {
     return createFlowRunner(flowName, flowParams, new Props());
   }
 
-  private FlowRunner createFlowRunner(String flowName,
-      HashMap<String, String> flowParams, Props azkabanProps) throws Exception {
-    Flow flow = flowMap.get(flowName);
+  private FlowRunner createFlowRunner(final String flowName,
+      final HashMap<String, String> flowParams, final Props azkabanProps) throws Exception {
+    final Flow flow = this.flowMap.get(flowName);
 
-    int exId = id++;
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
-    exFlow.setExecutionPath(workingDir.getPath());
+    final int exId = id++;
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
+    exFlow.setExecutionPath(this.workingDir.getPath());
     exFlow.setExecutionId(exId);
 
     exFlow.getExecutionOptions().addAllFlowParameters(flowParams);
-    fakeExecutorLoader.uploadExecutableFlow(exFlow);
+    this.fakeExecutorLoader.uploadExecutableFlow(exFlow);
 
-    FlowRunner runner =
-        new FlowRunner(fakeExecutorLoader.fetchExecutableFlow(exId),
-            fakeExecutorLoader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(this.fakeExecutorLoader.fetchExecutableFlow(exId),
+            this.fakeExecutorLoader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
     return runner;
   }
 
-  private void createNodeMap(ExecutableFlowBase flow,
-      Map<String, ExecutableNode> nodeMap) {
-    for (ExecutableNode node : flow.getExecutableNodes()) {
+  private void createNodeMap(final ExecutableFlowBase flow,
+      final Map<String, ExecutableNode> nodeMap) {
+    for (final ExecutableNode node : flow.getExecutableNodes()) {
       nodeMap.put(node.getNestedId(), node);
 
       if (node instanceof ExecutableFlowBase) {
@@ -258,16 +256,16 @@ public class FlowRunnerPropertyResolutionTest {
     }
   }
 
-  private Thread runFlowRunnerInThread(FlowRunner runner) {
-    Thread thread = new Thread(runner);
+  private Thread runFlowRunnerInThread(final FlowRunner runner) {
+    final Thread thread = new Thread(runner);
     thread.start();
     return thread;
   }
 
-  private void pause(long millisec) {
+  private void pause(final long millisec) {
     try {
       Thread.sleep(millisec);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
     }
   }
 }
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest.java
index c502def..98b06cf 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest.java
@@ -16,18 +16,6 @@
 
 package azkaban.execapp;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-
-import org.apache.commons.io.FileUtils;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.event.Event;
 import azkaban.event.Event.Type;
 import azkaban.executor.ExecutableFlow;
@@ -41,13 +29,23 @@ import azkaban.executor.Status;
 import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.jobtype.JobTypePluginSet;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.JSONUtils;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class FlowRunnerTest {
+
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
@@ -60,18 +58,18 @@ public class FlowRunnerTest {
   public void setUp() throws Exception {
     System.out.println("Create temp dir");
     synchronized (this) {
-      workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
-      if (workingDir.exists()) {
-        FileUtils.deleteDirectory(workingDir);
+      this.workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+      if (this.workingDir.exists()) {
+        FileUtils.deleteDirectory(this.workingDir);
       }
-      workingDir.mkdirs();
+      this.workingDir.mkdirs();
     }
-    jobtypeManager =
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
-    JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
+    final JobTypePluginSet pluginSet = this.jobtypeManager.getJobTypePluginSet();
     pluginSet.addPluginClass("java", JavaJob.class);
     pluginSet.addPluginClass("test", InteractiveTestJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
 
     InteractiveTestJob.clearTestJobs();
   }
@@ -80,26 +78,27 @@ public class FlowRunnerTest {
   public void tearDown() throws IOException {
     System.out.println("Teardown temp dir");
     synchronized (this) {
-      if (workingDir != null) {
-        FileUtils.deleteDirectory(workingDir);
-        workingDir = null;
+      if (this.workingDir != null) {
+        FileUtils.deleteDirectory(this.workingDir);
+        this.workingDir = null;
       }
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void exec1Normal() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
+    final MockExecutorLoader loader = new MockExecutorLoader();
     // just making it compile; may not work at all.
 
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
+    final FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
 
     Assert.assertTrue(!runner.isKilled());
     runner.run();
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
     Assert.assertTrue(exFlow.getStatus() == Status.SUCCEEDED);
     compareFinishedRuntime(runner);
 
@@ -114,22 +113,23 @@ public class FlowRunnerTest {
     testStatus(exFlow, "job10", Status.SUCCEEDED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
 
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void exec1Disabled() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    File testDir = new File("unit/executions/exectest1");
+    final File testDir = new File("unit/executions/exectest1");
     ExecutableFlow exFlow = prepareExecDir(testDir, "exec1", 1);
 
     // Disable couple in the middle and at the end.
@@ -138,7 +138,7 @@ public class FlowRunnerTest {
     exFlow.getExecutableNode("job5").setStatus(Status.DISABLED);
     exFlow.getExecutableNode("job10").setStatus(Status.DISABLED);
 
-    FlowRunner runner = createFlowRunner(exFlow, loader, eventCollector);
+    final FlowRunner runner = createFlowRunner(exFlow, loader, eventCollector);
 
     Assert.assertTrue(!runner.isKilled());
     Assert.assertTrue(exFlow.getStatus() == Status.READY);
@@ -160,28 +160,29 @@ public class FlowRunnerTest {
     testStatus(exFlow, "job10", Status.SKIPPED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
 
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void exec1Failed() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
 
-    FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+    final FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
 
     runner.run();
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
     Assert.assertTrue(!runner.isKilled());
     Assert.assertTrue("Flow status " + exFlow.getStatus(),
         exFlow.getStatus() == Status.FAILED);
@@ -198,29 +199,30 @@ public class FlowRunnerTest {
     testStatus(exFlow, "job10", Status.CANCELLED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
 
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void exec1FailedKillAll() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow flow = prepareExecDir(testDir, "exec2", 1);
     flow.getExecutionOptions().setFailureAction(FailureAction.CANCEL_ALL);
 
-    FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+    final FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
 
     runner.run();
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
 
     Assert.assertTrue(runner.isKilled());
 
@@ -229,11 +231,10 @@ public class FlowRunnerTest {
         exFlow.getStatus() == Status.FAILED);
 
     try {
-        Thread.sleep(500);
-    } catch (InterruptedException e) {
+      Thread.sleep(500);
+    } catch (final InterruptedException e) {
     }
 
-
     testStatus(exFlow, "job1", Status.SUCCEEDED);
     testStatus(exFlow, "job2d", Status.FAILED);
     testStatus(exFlow, "job3", Status.CANCELLED);
@@ -246,39 +247,39 @@ public class FlowRunnerTest {
     testStatus(exFlow, "job10", Status.CANCELLED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
       eventCollector.writeAllEvents();
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void exec1FailedFinishRest() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow flow = prepareExecDir(testDir, "exec3", 1);
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow flow = prepareExecDir(testDir, "exec3", 1);
     flow.getExecutionOptions().setFailureAction(
         FailureAction.FINISH_ALL_POSSIBLE);
-    FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
+    final FlowRunner runner = createFlowRunner(flow, loader, eventCollector);
 
     runner.run();
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
     Assert.assertTrue(
         "Expected flow " + Status.FAILED + " instead " + exFlow.getStatus(),
         exFlow.getStatus() == Status.FAILED);
 
     try {
       Thread.sleep(500);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
     }
 
-
     testStatus(exFlow, "job1", Status.SUCCEEDED);
     testStatus(exFlow, "job2d", Status.FAILED);
     testStatus(exFlow, "job3", Status.SUCCEEDED);
@@ -291,30 +292,31 @@ public class FlowRunnerTest {
     testStatus(exFlow, "job10", Status.CANCELLED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
       eventCollector.writeAllEvents();
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void execAndCancel() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
+    final FlowRunner runner = createFlowRunner(loader, eventCollector, "exec1");
 
     Assert.assertTrue(!runner.isKilled());
-    Thread thread = new Thread(runner);
+    final Thread thread = new Thread(runner);
     thread.start();
 
     try {
       Thread.sleep(5000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
@@ -324,12 +326,12 @@ public class FlowRunnerTest {
 
     try {
       Thread.sleep(2000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       // TODO Auto-generated catch block
       e.printStackTrace();
     }
 
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
     testStatus(exFlow, "job1", Status.SUCCEEDED);
     testStatus(exFlow, "job2", Status.SUCCEEDED);
     testStatus(exFlow, "job5", Status.CANCELLED);
@@ -345,26 +347,27 @@ public class FlowRunnerTest {
         exFlow.getStatus() == Status.KILLED);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.FLOW_STARTED,
-          Type.FLOW_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.FLOW_STARTED,
+          Type.FLOW_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
       eventCollector.writeAllEvents();
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void execRetries() throws Exception {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
     eventCollector.setEventFilterOut(Event.Type.JOB_FINISHED,
         Event.Type.JOB_STARTED, Event.Type.JOB_STATUS_CHANGED);
-    FlowRunner runner = createFlowRunner(loader, eventCollector, "exec4-retry");
+    final FlowRunner runner = createFlowRunner(loader, eventCollector, "exec4-retry");
 
     runner.run();
 
-    ExecutableFlow exFlow = runner.getExecutableFlow();
+    final ExecutableFlow exFlow = runner.getExecutableFlow();
     testStatus(exFlow, "job-retry", Status.SUCCEEDED);
     testStatus(exFlow, "job-pass", Status.SUCCEEDED);
     testStatus(exFlow, "job-retry-fail", Status.FAILED);
@@ -377,8 +380,8 @@ public class FlowRunnerTest {
         exFlow.getStatus() == Status.FAILED);
   }
 
-  private void testStatus(ExecutableFlow flow, String name, Status status) {
-    ExecutableNode node = flow.getExecutableNode(name);
+  private void testStatus(final ExecutableFlow flow, final String name, final Status status) {
+    final ExecutableNode node = flow.getExecutableNode(name);
 
     if (node.getStatus() != status) {
       Assert.fail("Status of job " + node.getId() + " is " + node.getStatus()
@@ -386,8 +389,8 @@ public class FlowRunnerTest {
     }
   }
 
-  private void testAttempts(ExecutableFlow flow, String name, int attempt) {
-    ExecutableNode node = flow.getExecutableNode(name);
+  private void testAttempts(final ExecutableFlow flow, final String name, final int attempt) {
+    final ExecutableNode node = flow.getExecutableNode(name);
 
     if (node.getAttempt() != attempt) {
       Assert.fail("Expected " + attempt + " got " + node.getAttempt()
@@ -395,39 +398,38 @@ public class FlowRunnerTest {
     }
   }
 
-  private ExecutableFlow prepareExecDir(File execDir, String flowName,
-      int execId) throws IOException {
+  private ExecutableFlow prepareExecDir(final File execDir, final String flowName,
+      final int execId) throws IOException {
     synchronized (this) {
-      FileUtils.copyDirectory(execDir, workingDir);
+      FileUtils.copyDirectory(execDir, this.workingDir);
     }
 
-    File jsonFlowFile = new File(workingDir, flowName + ".flow");
-    @SuppressWarnings("unchecked")
-    HashMap<String, Object> flowObj =
+    final File jsonFlowFile = new File(this.workingDir, flowName + ".flow");
+    final HashMap<String, Object> flowObj =
         (HashMap<String, Object>) JSONUtils.parseJSONFromFile(jsonFlowFile);
 
-    Project project = new Project(1, "myproject");
+    final Project project = new Project(1, "myproject");
     project.setVersion(2);
 
-    Flow flow = Flow.flowFromObject(flowObj);
-    ExecutableFlow execFlow = new ExecutableFlow(project, flow);
+    final Flow flow = Flow.flowFromObject(flowObj);
+    final ExecutableFlow execFlow = new ExecutableFlow(project, flow);
     execFlow.setExecutionId(execId);
-    execFlow.setExecutionPath(workingDir.getPath());
+    execFlow.setExecutionPath(this.workingDir.getPath());
     return execFlow;
   }
 
-  private void compareFinishedRuntime(FlowRunner runner) throws Exception {
-    ExecutableFlow flow = runner.getExecutableFlow();
-    for (String flowName : flow.getStartNodes()) {
-      ExecutableNode node = flow.getExecutableNode(flowName);
+  private void compareFinishedRuntime(final FlowRunner runner) throws Exception {
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    for (final String flowName : flow.getStartNodes()) {
+      final ExecutableNode node = flow.getExecutableNode(flowName);
       compareStartFinishTimes(flow, node, 0);
     }
   }
 
-  private void compareStartFinishTimes(ExecutableFlow flow,
-      ExecutableNode node, long previousEndTime) throws Exception {
-    long startTime = node.getStartTime();
-    long endTime = node.getEndTime();
+  private void compareStartFinishTimes(final ExecutableFlow flow,
+      final ExecutableNode node, final long previousEndTime) throws Exception {
+    final long startTime = node.getStartTime();
+    final long endTime = node.getEndTime();
 
     // If start time is < 0, so will the endtime.
     if (startTime <= 0) {
@@ -442,49 +444,51 @@ public class FlowRunnerTest {
     Assert.assertTrue("Start time for " + node.getId() + " is " + startTime
         + " and less than " + previousEndTime, startTime >= previousEndTime);
 
-    for (String outNode : node.getOutNodes()) {
-      ExecutableNode childNode = flow.getExecutableNode(outNode);
+    for (final String outNode : node.getOutNodes()) {
+      final ExecutableNode childNode = flow.getExecutableNode(outNode);
       compareStartFinishTimes(flow, childNode, endTime);
     }
   }
 
-  private FlowRunner createFlowRunner(ExecutableFlow flow,
-      ExecutorLoader loader, EventCollectorListener eventCollector) throws Exception {
+  private FlowRunner createFlowRunner(final ExecutableFlow flow,
+      final ExecutorLoader loader, final EventCollectorListener eventCollector) throws Exception {
     return createFlowRunner(flow, loader, eventCollector, new Props());
   }
 
-  private FlowRunner createFlowRunner(ExecutableFlow flow,
-      ExecutorLoader loader, EventCollectorListener eventCollector, Props azkabanProps)
+  private FlowRunner createFlowRunner(final ExecutableFlow flow,
+      final ExecutorLoader loader, final EventCollectorListener eventCollector,
+      final Props azkabanProps)
       throws Exception {
     // File testDir = new File("unit/executions/exectest1");
     // MockProjectLoader projectLoader = new MockProjectLoader(new
     // File(flow.getExecutionPath()));
 
     loader.uploadExecutableFlow(flow);
-    FlowRunner runner =
-        new FlowRunner(flow, loader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(flow, loader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
 
     runner.addListener(eventCollector);
 
     return runner;
   }
 
-  private FlowRunner createFlowRunner(ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName) throws Exception {
+  private FlowRunner createFlowRunner(final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName) throws Exception {
     return createFlowRunner(loader, eventCollector, flowName, new Props());
   }
 
-  private FlowRunner createFlowRunner(ExecutorLoader loader,
-      EventCollectorListener eventCollector, String flowName, Props azkabanProps) throws Exception {
-    File testDir = new File("unit/executions/exectest1");
-    ExecutableFlow exFlow = prepareExecDir(testDir, flowName, 1);
+  private FlowRunner createFlowRunner(final ExecutorLoader loader,
+      final EventCollectorListener eventCollector, final String flowName, final Props azkabanProps)
+      throws Exception {
+    final File testDir = new File("unit/executions/exectest1");
+    final ExecutableFlow exFlow = prepareExecDir(testDir, flowName, 1);
     // MockProjectLoader projectLoader = new MockProjectLoader(new
     // File(exFlow.getExecutionPath()));
 
     loader.uploadExecutableFlow(exFlow);
 
-    FlowRunner runner =
-        new FlowRunner(exFlow, loader, fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner =
+        new FlowRunner(exFlow, loader, this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
 
     runner.addListener(eventCollector);
 
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest2.java b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest2.java
index ce4b5fc..decc17a 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest2.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/FlowRunnerTest2.java
@@ -16,20 +16,6 @@
 
 package azkaban.execapp;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
-import org.junit.Assert;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.executor.ExecutableFlow;
 import azkaban.executor.ExecutableFlowBase;
 import azkaban.executor.ExecutableNode;
@@ -43,11 +29,22 @@ import azkaban.flow.Flow;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.jobtype.JobTypePluginSet;
 import azkaban.project.DirectoryFlowLoader;
+import azkaban.project.MockProjectLoader;
 import azkaban.project.Project;
 import azkaban.project.ProjectLoader;
 import azkaban.project.ProjectManagerException;
-import azkaban.project.MockProjectLoader;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 /**
  * Test the flow run, especially with embedded flows.
@@ -59,48 +56,49 @@ import azkaban.utils.Props;
  * Flow jobf looks like the following:
  *
  *
- *       joba       joba1
- *      /  |  \      |
- *     /   |   \     |
- *  jobb  jobd jobc  |
- *     \   |   /    /
- *      \  |  /    /
- *        jobe    /
- *         |     /
- *         |    /
- *        jobf
+ * joba       joba1
+ * /  |  \      |
+ * /   |   \     |
+ * jobb  jobd jobc  |
+ * \   |   /    /
+ * \  |  /    /
+ * jobe    /
+ * |     /
+ * |    /
+ * jobf
  *
- *  The job 'jobb' is an embedded flow:
+ * The job 'jobb' is an embedded flow:
  *
- *  jobb:innerFlow
+ * jobb:innerFlow
  *
- *        innerJobA
- *        /       \
- *   innerJobB   innerJobC
- *        \       /
- *        innerFlow
+ * innerJobA
+ * /       \
+ * innerJobB   innerJobC
+ * \       /
+ * innerFlow
  *
  *
- *  The job 'jobd' is a simple embedded flow:
+ * The job 'jobd' is a simple embedded flow:
  *
- *  jobd:innerFlow2
+ * jobd:innerFlow2
  *
- *       innerJobA
- *           |
- *       innerFlow2
+ * innerJobA
+ * |
+ * innerFlow2
  *
- *  The following tests checks each stage of the flow run by forcing jobs to
- *  succeed or fail.
+ * The following tests checks each stage of the flow run by forcing jobs to
+ * succeed or fail.
  */
 public class FlowRunnerTest2 {
+
+  private static int id = 101;
+  private final Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private File workingDir;
   private JobTypeManager jobtypeManager;
   private ProjectLoader fakeProjectLoader;
   private ExecutorLoader fakeExecutorLoader;
-  private Logger logger = Logger.getLogger(FlowRunnerTest2.class);
   private Project project;
   private Map<String, Flow> flowMap;
-  private static int id=101;
 
   public FlowRunnerTest2() {
   }
@@ -108,23 +106,23 @@ public class FlowRunnerTest2 {
   @Before
   public void setUp() throws Exception {
     System.out.println("Create temp dir");
-    workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
-    if (workingDir.exists()) {
-      FileUtils.deleteDirectory(workingDir);
+    this.workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+    if (this.workingDir.exists()) {
+      FileUtils.deleteDirectory(this.workingDir);
     }
-    workingDir.mkdirs();
-    jobtypeManager = new JobTypeManager(null, null,
+    this.workingDir.mkdirs();
+    this.jobtypeManager = new JobTypeManager(null, null,
         this.getClass().getClassLoader());
-    JobTypePluginSet pluginSet = jobtypeManager.getJobTypePluginSet();
+    final JobTypePluginSet pluginSet = this.jobtypeManager.getJobTypePluginSet();
 
     pluginSet.addPluginClass("java", JavaJob.class);
     pluginSet.addPluginClass("test", InteractiveTestJob.class);
-    fakeProjectLoader = new MockProjectLoader(workingDir);
-    fakeExecutorLoader = new MockExecutorLoader();
-    project = new Project(1, "testProject");
+    this.fakeProjectLoader = new MockProjectLoader(this.workingDir);
+    this.fakeExecutorLoader = new MockExecutorLoader();
+    this.project = new Project(1, "testProject");
 
-    File dir = new File("unit/executions/embedded2");
-    prepareProject(project, dir);
+    final File dir = new File("unit/executions/embedded2");
+    prepareProject(this.project, dir);
 
     InteractiveTestJob.clearTestJobs();
   }
@@ -132,30 +130,29 @@ public class FlowRunnerTest2 {
   @After
   public void tearDown() throws IOException {
     System.out.println("Teardown temp dir");
-    if (workingDir != null) {
-      FileUtils.deleteDirectory(workingDir);
-      workingDir = null;
+    if (this.workingDir != null) {
+      FileUtils.deleteDirectory(this.workingDir);
+      this.workingDir = null;
     }
   }
 
   /**
    * Tests the basic successful flow run, and also tests all output variables
    * from each job.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicRun() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -163,7 +160,7 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("joba1", Status.RUNNING);
 
     compareStates(expectedStateMap, nodeMap);
-    Props joba = nodeMap.get("joba").getInputProps();
+    final Props joba = nodeMap.get("joba").getInputProps();
     Assert.assertEquals("joba.1", joba.get("param1"));
     Assert.assertEquals("test1.2", joba.get("param2"));
     Assert.assertEquals("test1.3", joba.get("param3"));
@@ -173,7 +170,7 @@ public class FlowRunnerTest2 {
     Assert.assertEquals("test2.7", joba.get("param7"));
     Assert.assertEquals("test2.8", joba.get("param8"));
 
-    Props joba1 = nodeMap.get("joba1").getInputProps();
+    final Props joba1 = nodeMap.get("joba1").getInputProps();
     Assert.assertEquals("test1.1", joba1.get("param1"));
     Assert.assertEquals("test1.2", joba1.get("param2"));
     Assert.assertEquals("test1.3", joba1.get("param3"));
@@ -196,16 +193,16 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("jobb:innerJobA", Status.RUNNING);
     compareStates(expectedStateMap, nodeMap);
 
-    ExecutableNode node = nodeMap.get("jobb");
+    final ExecutableNode node = nodeMap.get("jobb");
     Assert.assertEquals(Status.RUNNING, node.getStatus());
-    Props jobb = node.getInputProps();
+    final Props jobb = node.getInputProps();
     Assert.assertEquals("override.4", jobb.get("param4"));
     // Test that jobb properties overwrites the output properties
     Assert.assertEquals("moo", jobb.get("testprops"));
     Assert.assertEquals("jobb", jobb.get("output.override"));
     Assert.assertEquals("joba", jobb.get("output.joba"));
 
-    Props jobbInnerJobA = nodeMap.get("jobb:innerJobA").getInputProps();
+    final Props jobbInnerJobA = nodeMap.get("jobb:innerJobA").getInputProps();
     Assert.assertEquals("test1.1", jobbInnerJobA.get("param1"));
     Assert.assertEquals("test1.2", jobbInnerJobA.get("param2"));
     Assert.assertEquals("test1.3", jobbInnerJobA.get("param3"));
@@ -225,7 +222,7 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("jobb:innerJobB", Status.RUNNING);
     expectedStateMap.put("jobb:innerJobC", Status.RUNNING);
     compareStates(expectedStateMap, nodeMap);
-    Props jobbInnerJobB = nodeMap.get("jobb:innerJobB").getInputProps();
+    final Props jobbInnerJobB = nodeMap.get("jobb:innerJobB").getInputProps();
     Assert.assertEquals("test1.1", jobbInnerJobB.get("param1"));
     Assert.assertEquals("override.4", jobbInnerJobB.get("param4"));
     Assert.assertEquals("jobb.innerJobA",
@@ -242,7 +239,7 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("jobb:innerFlow", Status.RUNNING);
     compareStates(expectedStateMap, nodeMap);
 
-    Props jobbInnerJobD = nodeMap.get("jobb:innerFlow").getInputProps();
+    final Props jobbInnerJobD = nodeMap.get("jobb:innerFlow").getInputProps();
     Assert.assertEquals("test1.1", jobbInnerJobD.get("param1"));
     Assert.assertEquals("override.4", jobbInnerJobD.get("param4"));
     Assert.assertEquals("jobb.innerJobB",
@@ -257,7 +254,7 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("jobb:innerFlow", Status.SUCCEEDED);
     expectedStateMap.put("jobb", Status.SUCCEEDED);
     compareStates(expectedStateMap, nodeMap);
-    Props jobbOutput = nodeMap.get("jobb").getOutputProps();
+    final Props jobbOutput = nodeMap.get("jobb").getOutputProps();
     Assert.assertEquals("test1", jobbOutput.get("output1.jobb"));
     Assert.assertEquals("test2", jobbOutput.get("output2.jobb"));
 
@@ -277,7 +274,7 @@ public class FlowRunnerTest2 {
     expectedStateMap.put("jobe", Status.RUNNING);
     compareStates(expectedStateMap, nodeMap);
 
-    Props jobd = nodeMap.get("jobe").getInputProps();
+    final Props jobd = nodeMap.get("jobe").getInputProps();
     Assert.assertEquals("test1", jobd.get("output1.jobb"));
     Assert.assertEquals("jobc", jobd.get("output.jobc"));
 
@@ -303,23 +300,22 @@ public class FlowRunnerTest2 {
   /**
    * Tests a flow with Disabled jobs and flows. They should properly SKIP
    * executions
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testDisabledNormal() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
     flow.getExecutableNode("jobb").setStatus(Status.DISABLED);
-    ((ExecutableFlowBase)flow.getExecutableNode("jobd")).getExecutableNode(
+    ((ExecutableFlowBase) flow.getExecutableNode("jobd")).getExecutableNode(
         "innerJobA").setStatus(Status.DISABLED);
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -376,21 +372,20 @@ public class FlowRunnerTest2 {
    * Tests a failure with the default FINISH_CURRENTLY_RUNNING.
    * After the first failure, every job that started should complete, and the
    * rest of the jobs should be skipped.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testNormalFailure1() throws Exception {
     // Test propagation of KILLED status to embedded flows.
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -425,20 +420,20 @@ public class FlowRunnerTest2 {
 
   /**
    * Test #2 on the default failure case.
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testNormalFailure2() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -488,18 +483,19 @@ public class FlowRunnerTest2 {
     Assert.assertFalse(thread.isAlive());
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testNormalFailure3() throws Exception {
     // Test propagation of CANCELLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -562,22 +558,21 @@ public class FlowRunnerTest2 {
    * In this case, all jobs which have had its pre-requisite met can continue
    * to run. Finishes when the failure is propagated to the last node of the
    * flow.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testFailedFinishingFailure3() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.FINISH_ALL_POSSIBLE);
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -644,22 +639,21 @@ public class FlowRunnerTest2 {
    *
    * Any jobs that are running will be assigned a KILLED state, and any nodes
    * which were skipped due to prior errors will be given a CANCELLED state.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testCancelOnFailure() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.CANCEL_ALL);
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -708,23 +702,23 @@ public class FlowRunnerTest2 {
 
   /**
    * Tests retries after a failure
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testRetryOnFailure() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
     flow.getExecutableNode("joba").setStatus(Status.DISABLED);
-    ((ExecutableFlowBase)flow.getExecutableNode("jobb")).getExecutableNode(
+    ((ExecutableFlowBase) flow.getExecutableNode("jobb")).getExecutableNode(
         "innerFlow").setStatus(Status.DISABLED);
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -759,10 +753,10 @@ public class FlowRunnerTest2 {
     Assert.assertEquals(Status.FAILED_FINISHING, flow.getStatus());
     compareStates(expectedStateMap, nodeMap);
 
-    ExecutableNode node = nodeMap.get("jobd:innerFlow2");
-    ExecutableFlowBase base = node.getParentFlow();
-    for (String nodeId : node.getInNodes()) {
-      ExecutableNode inNode = base.getExecutableNode(nodeId);
+    final ExecutableNode node = nodeMap.get("jobd:innerFlow2");
+    final ExecutableFlowBase base = node.getParentFlow();
+    for (final String nodeId : node.getInNodes()) {
+      final ExecutableNode inNode = base.getExecutableNode(nodeId);
       System.out.println(inNode.getId() + " > " + inNode.getStatus());
     }
 
@@ -778,7 +772,6 @@ public class FlowRunnerTest2 {
     compareStates(expectedStateMap, nodeMap);
     Assert.assertTrue(thread.isAlive());
 
-
     InteractiveTestJob.getTestJob("jobb:innerJobB").succeedJob();
     InteractiveTestJob.getTestJob("jobb:innerJobC").succeedJob();
     InteractiveTestJob.getTestJob("jobd:innerFlow2").succeedJob();
@@ -817,22 +810,21 @@ public class FlowRunnerTest2 {
    * Tests the manual Killing of a flow. In this case, the flow is just fine
    * before the cancel
    * is called.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testCancel() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.CANCEL_ALL);
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(1000);
 
     // After it starts up, only joba should be running
@@ -881,21 +873,20 @@ public class FlowRunnerTest2 {
 
   /**
    * Tests the manual invocation of cancel on a flow that is FAILED_FINISHING
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testManualCancelOnFailure() throws Exception {
     // Test propagation of KILLED status to embedded flows different branch
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
-    ExecutableFlow flow = runner.getExecutableFlow();
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final ExecutableFlow flow = runner.getExecutableFlow();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -950,21 +941,20 @@ public class FlowRunnerTest2 {
 
   /**
    * Tests that pause and resume work
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPause() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -1066,21 +1056,20 @@ public class FlowRunnerTest2 {
   /**
    * Test the condition for a manual invocation of a KILL (cancel) on a flow
    * that has been paused. The flow should unpause and be killed immediately.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPauseKill() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf");
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf");
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -1131,22 +1120,21 @@ public class FlowRunnerTest2 {
   /**
    * Tests the case where a failure occurs on a Paused flow. In this case, the
    * flow should stay paused.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPauseFail() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.FINISH_CURRENTLY_RUNNING);
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -1202,22 +1190,21 @@ public class FlowRunnerTest2 {
   /**
    * Test the condition when a Finish all possible is called during a pause.
    * The Failure is not acted upon until the flow is resumed.
-   *
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPauseFailFinishAll() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.FINISH_ALL_POSSIBLE);
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
 
     // After it starts up, only joba should be running
@@ -1278,21 +1265,21 @@ public class FlowRunnerTest2 {
   /**
    * Tests the case when a flow is paused and a failure causes a kill. The
    * flow should die immediately regardless of the 'paused' status.
-   * @throws Exception
    */
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testPauseFailKill() throws Exception {
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    FlowRunner runner = createFlowRunner(eventCollector, "jobf",
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final FlowRunner runner = createFlowRunner(eventCollector, "jobf",
         FailureAction.CANCEL_ALL);
 
-    Map<String, Status> expectedStateMap = new HashMap<String, Status>();
-    Map<String, ExecutableNode> nodeMap = new HashMap<String, ExecutableNode>();
+    final Map<String, Status> expectedStateMap = new HashMap<>();
+    final Map<String, ExecutableNode> nodeMap = new HashMap<>();
 
     // 1. START FLOW
-    ExecutableFlow flow = runner.getExecutableFlow();
+    final ExecutableFlow flow = runner.getExecutableFlow();
     createExpectedStateMap(flow, expectedStateMap, nodeMap);
-    Thread thread = runFlowRunnerInThread(runner);
+    final Thread thread = runFlowRunnerInThread(runner);
     pause(250);
     // After it starts up, only joba should be running
     expectedStateMap.put("joba", Status.RUNNING);
@@ -1335,38 +1322,37 @@ public class FlowRunnerTest2 {
   }
 
 
-  private Thread runFlowRunnerInThread(FlowRunner runner) {
-    Thread thread = new Thread(runner);
+  private Thread runFlowRunnerInThread(final FlowRunner runner) {
+    final Thread thread = new Thread(runner);
     thread.start();
     return thread;
   }
 
-  private void pause(long millisec) {
+  private void pause(final long millisec) {
     try {
       Thread.sleep(millisec);
-    }
-    catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
     }
   }
 
-  private void createExpectedStateMap(ExecutableFlowBase flow,
-      Map<String, Status> expectedStateMap,
-      Map<String, ExecutableNode> nodeMap) {
-    for (ExecutableNode node: flow.getExecutableNodes()) {
+  private void createExpectedStateMap(final ExecutableFlowBase flow,
+      final Map<String, Status> expectedStateMap,
+      final Map<String, ExecutableNode> nodeMap) {
+    for (final ExecutableNode node : flow.getExecutableNodes()) {
       expectedStateMap.put(node.getNestedId(), node.getStatus());
       nodeMap.put(node.getNestedId(), node);
       if (node instanceof ExecutableFlowBase) {
-        createExpectedStateMap((ExecutableFlowBase)node, expectedStateMap,
+        createExpectedStateMap((ExecutableFlowBase) node, expectedStateMap,
             nodeMap);
       }
     }
   }
 
-  private void compareStates(Map<String, Status> expectedStateMap,
-      Map<String, ExecutableNode> nodeMap) {
-    for (String printedId: expectedStateMap.keySet()) {
-      Status expectedStatus = expectedStateMap.get(printedId);
-      ExecutableNode node = nodeMap.get(printedId);
+  private void compareStates(final Map<String, Status> expectedStateMap,
+      final Map<String, ExecutableNode> nodeMap) {
+    for (final String printedId : expectedStateMap.keySet()) {
+      final Status expectedStatus = expectedStateMap.get(printedId);
+      final ExecutableNode node = nodeMap.get(printedId);
 
       if (expectedStatus != node.getStatus()) {
         Assert.fail("Expected values do not match for " + printedId
@@ -1376,54 +1362,55 @@ public class FlowRunnerTest2 {
     }
   }
 
-  private void prepareProject(Project project, File directory)
+  private void prepareProject(final Project project, final File directory)
       throws ProjectManagerException, IOException {
-    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
+    final DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), this.logger);
     loader.loadProjectFlow(project, directory);
     if (!loader.getErrors().isEmpty()) {
-      for (String error: loader.getErrors()) {
+      for (final String error : loader.getErrors()) {
         System.out.println(error);
       }
 
       throw new RuntimeException("Errors found in setup");
     }
 
-    flowMap = loader.getFlowMap();
-    project.setFlows(flowMap);
-    FileUtils.copyDirectory(directory, workingDir);
+    this.flowMap = loader.getFlowMap();
+    project.setFlows(this.flowMap);
+    FileUtils.copyDirectory(directory, this.workingDir);
   }
 
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName) throws Exception {
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName) throws Exception {
     return createFlowRunner(eventCollector, flowName,
         FailureAction.FINISH_CURRENTLY_RUNNING);
   }
 
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName, FailureAction action) throws Exception {
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName, final FailureAction action) throws Exception {
     return createFlowRunner(eventCollector, flowName, action, new Props());
   }
 
-  private FlowRunner createFlowRunner(EventCollectorListener eventCollector,
-      String flowName, FailureAction action, Props azkabanProps) throws Exception {
-    Flow flow = flowMap.get(flowName);
+  private FlowRunner createFlowRunner(final EventCollectorListener eventCollector,
+      final String flowName, final FailureAction action, final Props azkabanProps)
+      throws Exception {
+    final Flow flow = this.flowMap.get(flowName);
 
-    int exId = id++;
-    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
-    exFlow.setExecutionPath(workingDir.getPath());
+    final int exId = id++;
+    final ExecutableFlow exFlow = new ExecutableFlow(this.project, flow);
+    exFlow.setExecutionPath(this.workingDir.getPath());
     exFlow.setExecutionId(exId);
 
-    Map<String, String> flowParam = new HashMap<String, String>();
+    final Map<String, String> flowParam = new HashMap<>();
     flowParam.put("param4", "override.4");
     flowParam.put("param10", "override.10");
     flowParam.put("param11", "override.11");
     exFlow.getExecutionOptions().addAllFlowParameters(flowParam);
     exFlow.getExecutionOptions().setFailureAction(action);
-    fakeExecutorLoader.uploadExecutableFlow(exFlow);
+    this.fakeExecutorLoader.uploadExecutableFlow(exFlow);
 
-    FlowRunner runner = new FlowRunner(
-        fakeExecutorLoader.fetchExecutableFlow(exId), fakeExecutorLoader,
-        fakeProjectLoader, jobtypeManager, azkabanProps);
+    final FlowRunner runner = new FlowRunner(
+        this.fakeExecutorLoader.fetchExecutableFlow(exId), this.fakeExecutorLoader,
+        this.fakeProjectLoader, this.jobtypeManager, azkabanProps);
 
     runner.addListener(eventCollector);
 
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/JobRunnerTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/JobRunnerTest.java
index 632f8e7..49e02eb 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/JobRunnerTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/JobRunnerTest.java
@@ -16,19 +16,6 @@
 
 package azkaban.execapp;
 
-import java.io.File;
-import java.io.IOException;
-import java.util.HashSet;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Ignore;
-import org.junit.Test;
-
 import azkaban.event.Event;
 import azkaban.event.Event.Type;
 import azkaban.event.EventData;
@@ -42,11 +29,22 @@ import azkaban.executor.Status;
 import azkaban.jobExecutor.ProcessJob;
 import azkaban.jobtype.JobTypeManager;
 import azkaban.utils.Props;
+import java.io.File;
+import java.io.IOException;
+import java.util.HashSet;
+import org.apache.commons.io.FileUtils;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
 
 public class JobRunnerTest {
+
+  private final Logger logger = Logger.getLogger("JobRunnerTest");
   private File workingDir;
   private JobTypeManager jobtypeManager;
-  private Logger logger = Logger.getLogger("JobRunnerTest");
 
   public JobRunnerTest() {
 
@@ -55,33 +53,34 @@ public class JobRunnerTest {
   @Before
   public void setUp() throws Exception {
     System.out.println("Create temp dir");
-    workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
-    if (workingDir.exists()) {
-      FileUtils.deleteDirectory(workingDir);
+    this.workingDir = new File("_AzkabanTestDir_" + System.currentTimeMillis());
+    if (this.workingDir.exists()) {
+      FileUtils.deleteDirectory(this.workingDir);
     }
-    workingDir.mkdirs();
-    jobtypeManager =
+    this.workingDir.mkdirs();
+    this.jobtypeManager =
         new JobTypeManager(null, null, this.getClass().getClassLoader());
 
-    jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
+    this.jobtypeManager.getJobTypePluginSet().addPluginClass("java", JavaJob.class);
   }
 
   @After
   public void tearDown() throws IOException {
     System.out.println("Teardown temp dir");
-    if (workingDir != null) {
-      FileUtils.deleteDirectory(workingDir);
-      workingDir = null;
+    if (this.workingDir != null) {
+      FileUtils.deleteDirectory(this.workingDir);
+      this.workingDir = null;
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testBasicRun() {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, false, loader, eventCollector);
-    ExecutableNode node = runner.getNode();
+    final ExecutableNode node = runner.getNode();
 
     eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED, new EventData(node)));
     Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
@@ -96,8 +95,8 @@ public class JobRunnerTest {
     Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
     Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
 
-    File logFile = new File(runner.getLogFilePath());
-    Props outputProps = runner.getNode().getOutputProps();
+    final File logFile = new File(runner.getLogFilePath());
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps != null);
     Assert.assertTrue(logFile.exists());
 
@@ -105,20 +104,21 @@ public class JobRunnerTest {
 
     Assert.assertTrue(eventCollector.checkOrdering());
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testFailedRun() {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, true, loader, eventCollector);
-    ExecutableNode node = runner.getNode();
+    final ExecutableNode node = runner.getNode();
 
     Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
         || runner.getStatus() != Status.FAILED);
@@ -129,8 +129,8 @@ public class JobRunnerTest {
     Assert.assertTrue(node.getStartTime() > 0 && node.getEndTime() > 0);
     Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
 
-    File logFile = new File(runner.getLogFilePath());
-    Props outputProps = runner.getNode().getOutputProps();
+    final File logFile = new File(runner.getLogFilePath());
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps == null);
     Assert.assertTrue(logFile.exists());
     Assert.assertTrue(eventCollector.checkOrdering());
@@ -138,20 +138,20 @@ public class JobRunnerTest {
     Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
   @Test
   public void testDisabledRun() {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, false, loader, eventCollector);
-    ExecutableNode node = runner.getNode();
+    final ExecutableNode node = runner.getNode();
 
     node.setStatus(Status.DISABLED);
 
@@ -166,7 +166,7 @@ public class JobRunnerTest {
     Assert.assertTrue(node.getEndTime() - node.getStartTime() < 10);
 
     // Log file and output files should not exist.
-    Props outputProps = runner.getNode().getOutputProps();
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps == null);
     Assert.assertTrue(runner.getLogFilePath() == null);
     Assert.assertTrue(eventCollector.checkOrdering());
@@ -174,20 +174,20 @@ public class JobRunnerTest {
     Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
 
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
   @Test
   public void testPreKilledRun() {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, false, loader, eventCollector);
-    ExecutableNode node = runner.getNode();
+    final ExecutableNode node = runner.getNode();
 
     node.setStatus(Status.KILLED);
 
@@ -205,32 +205,33 @@ public class JobRunnerTest {
     Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == null);
 
     // Log file and output files should not exist.
-    Props outputProps = runner.getNode().getOutputProps();
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps == null);
     Assert.assertTrue(runner.getLogFilePath() == null);
     Assert.assertTrue(!runner.isKilled());
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   // todo: HappyRay investigate if it is worth fixing this test. If not, remove it.
   // The change history doesn't mention why this test was ignored.
   public void testCancelRun() throws InterruptedException {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(13, "testJob", 10, false, loader, eventCollector);
-    ExecutableNode node = runner.getNode();
+    final ExecutableNode node = runner.getNode();
 
     Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED
         || runner.getStatus() != Status.FAILED);
 
-    Thread thread = new Thread(runner);
+    final Thread thread = new Thread(runner);
     thread.start();
 
     Thread.sleep(2000);
@@ -246,31 +247,32 @@ public class JobRunnerTest {
     Assert.assertTrue(loader.getNodeUpdateCount(node.getId()) == 3);
 
     // Log file and output files should not exist.
-    File logFile = new File(runner.getLogFilePath());
-    Props outputProps = runner.getNode().getOutputProps();
+    final File logFile = new File(runner.getLogFilePath());
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps == null);
     Assert.assertTrue(logFile.exists());
     Assert.assertTrue(eventCollector.checkOrdering());
     Assert.assertTrue(runner.isKilled());
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
+    } catch (final Exception e) {
       System.out.println(e.getMessage());
 
       Assert.fail(e.getMessage());
     }
   }
 
-  @Ignore @Test
+  @Ignore
+  @Test
   public void testDelayedExecutionJob() {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, false, loader, eventCollector);
     runner.setDelayStart(5000);
-    long startTime = System.currentTimeMillis();
-    ExecutableNode node = runner.getNode();
+    final long startTime = System.currentTimeMillis();
+    final ExecutableNode node = runner.getNode();
 
     eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED, new EventData(node)));
     Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED);
@@ -285,8 +287,8 @@ public class JobRunnerTest {
     Assert.assertTrue(node.getEndTime() - node.getStartTime() > 1000);
     Assert.assertTrue(node.getStartTime() - startTime >= 5000);
 
-    File logFile = new File(runner.getLogFilePath());
-    Props outputProps = runner.getNode().getOutputProps();
+    final File logFile = new File(runner.getLogFilePath());
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps != null);
     Assert.assertTrue(logFile.exists());
     Assert.assertFalse(runner.isKilled());
@@ -294,27 +296,27 @@ public class JobRunnerTest {
 
     Assert.assertTrue(eventCollector.checkOrdering());
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_STARTED,
-          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_STARTED,
+          Type.JOB_STATUS_CHANGED, Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
   @Test
   public void testDelayedExecutionCancelledJob() throws InterruptedException {
-    MockExecutorLoader loader = new MockExecutorLoader();
-    EventCollectorListener eventCollector = new EventCollectorListener();
-    JobRunner runner =
+    final MockExecutorLoader loader = new MockExecutorLoader();
+    final EventCollectorListener eventCollector = new EventCollectorListener();
+    final JobRunner runner =
         createJobRunner(1, "testJob", 1, false, loader, eventCollector);
     runner.setDelayStart(5000);
-    long startTime = System.currentTimeMillis();
-    ExecutableNode node = runner.getNode();
+    final long startTime = System.currentTimeMillis();
+    final ExecutableNode node = runner.getNode();
 
     eventCollector.handleEvent(Event.create(null, Event.Type.JOB_STARTED, new EventData(node)));
     Assert.assertTrue(runner.getStatus() != Status.SUCCEEDED);
 
-    Thread thread = new Thread(runner);
+    final Thread thread = new Thread(runner);
     thread.start();
 
     Thread.sleep(2000);
@@ -332,50 +334,52 @@ public class JobRunnerTest {
     Assert.assertTrue(node.getStartTime() - startTime <= 5000);
     Assert.assertTrue(runner.isKilled());
 
-    File logFile = new File(runner.getLogFilePath());
-    Props outputProps = runner.getNode().getOutputProps();
+    final File logFile = new File(runner.getLogFilePath());
+    final Props outputProps = runner.getNode().getOutputProps();
     Assert.assertTrue(outputProps == null);
     Assert.assertTrue(logFile.exists());
 
     Assert.assertTrue(eventCollector.checkOrdering());
     try {
-      eventCollector.checkEventExists(new Type[] { Type.JOB_FINISHED });
-    } catch (Exception e) {
+      eventCollector.checkEventExists(new Type[]{Type.JOB_FINISHED});
+    } catch (final Exception e) {
       Assert.fail(e.getMessage());
     }
   }
 
-  private Props createProps(int sleepSec, boolean fail) {
-    Props props = new Props();
+  private Props createProps(final int sleepSec, final boolean fail) {
+    final Props props = new Props();
     props.put("type", "java");
 
     props.put(JavaJob.JOB_CLASS, SleepJavaJob.class.getName());
     props.put("seconds", sleepSec);
-    props.put(ProcessJob.WORKING_DIR, workingDir.getPath());
+    props.put(ProcessJob.WORKING_DIR, this.workingDir.getPath());
     props.put("fail", String.valueOf(fail));
 
     return props;
   }
 
-  private JobRunner createJobRunner(int execId, String name, int time,
-      boolean fail, ExecutorLoader loader, EventCollectorListener listener) {
+  private JobRunner createJobRunner(final int execId, final String name, final int time,
+      final boolean fail, final ExecutorLoader loader, final EventCollectorListener listener) {
     return createJobRunner(execId, name, time, fail, loader, listener, new Props());
   }
 
-  private JobRunner createJobRunner(int execId, String name, int time,
-      boolean fail, ExecutorLoader loader, EventCollectorListener listener, Props azkabanProps) {
-    ExecutableFlow flow = new ExecutableFlow();
+  private JobRunner createJobRunner(final int execId, final String name, final int time,
+      final boolean fail, final ExecutorLoader loader, final EventCollectorListener listener,
+      final Props azkabanProps) {
+    final ExecutableFlow flow = new ExecutableFlow();
     flow.setExecutionId(execId);
-    ExecutableNode node = new ExecutableNode();
+    final ExecutableNode node = new ExecutableNode();
     node.setId(name);
     node.setParentFlow(flow);
 
-    Props props = createProps(time, fail);
+    final Props props = createProps(time, fail);
     node.setInputProps(props);
-    HashSet<String> proxyUsers = new HashSet<String>();
+    final HashSet<String> proxyUsers = new HashSet<>();
     proxyUsers.add(flow.getSubmitUser());
-    JobRunner runner = new JobRunner(node, workingDir, loader, jobtypeManager, azkabanProps);
-    runner.setLogSettings(logger, "5MB", 4);
+    final JobRunner runner = new JobRunner(node, this.workingDir, loader, this.jobtypeManager,
+        azkabanProps);
+    runner.setLogSettings(this.logger, "5MB", 4);
 
     runner.addListener(listener);
     return runner;
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/ProjectVersionsTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/ProjectVersionsTest.java
index 221ba31..d2d85dd 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/ProjectVersionsTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/ProjectVersionsTest.java
@@ -18,7 +18,6 @@ package azkaban.execapp;
 
 import java.util.ArrayList;
 import java.util.Collections;
-
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -26,7 +25,7 @@ public class ProjectVersionsTest {
 
   @Test
   public void testVersionOrdering() {
-    ArrayList<ProjectVersion> pversion = new ArrayList<ProjectVersion>();
+    final ArrayList<ProjectVersion> pversion = new ArrayList<>();
     pversion.add(new ProjectVersion(1, 2));
     pversion.add(new ProjectVersion(1, 3));
     pversion.add(new ProjectVersion(1, 1));
@@ -34,7 +33,7 @@ public class ProjectVersionsTest {
     Collections.sort(pversion);
 
     int i = 0;
-    for (ProjectVersion version : pversion) {
+    for (final ProjectVersion version : pversion) {
       Assert.assertTrue(i < version.getVersion());
       i = version.getVersion();
     }
diff --git a/azkaban-exec-server/src/test/java/azkaban/execapp/StatisticsServletTest.java b/azkaban-exec-server/src/test/java/azkaban/execapp/StatisticsServletTest.java
index 1442a27..9a513b9 100644
--- a/azkaban-exec-server/src/test/java/azkaban/execapp/StatisticsServletTest.java
+++ b/azkaban-exec-server/src/test/java/azkaban/execapp/StatisticsServletTest.java
@@ -1,42 +1,18 @@
 package azkaban.execapp;
 
+import azkaban.executor.ExecutorInfo;
 import org.junit.Assert;
 import org.junit.Ignore;
 import org.junit.Test;
 
-import azkaban.executor.ExecutorInfo;
-
 @Ignore
 public class StatisticsServletTest {
-  private static class MockStatisticsServlet extends ServerStatisticsServlet {
-    /** */
-    private static final long serialVersionUID = 1L;
-
-    public ExecutorInfo getStastics() {
-      return cachedstats;
-    }
-
-    public long getUpdatedTime() {
-      return lastRefreshedTime;
-    }
-
-    public void callPopulateStatistics() {
-      this.populateStatistics(false);
-    }
 
-    public void callFillCpuUsage(ExecutorInfo stats) {
-      this.fillCpuUsage(stats);
-    }
-
-    public void callFillRemainingMemoryPercent(ExecutorInfo stats) {
-      this.fillRemainingMemoryPercent(stats);
-    }
-  }
-  private MockStatisticsServlet statServlet = new MockStatisticsServlet();
+  private final MockStatisticsServlet statServlet = new MockStatisticsServlet();
 
   @Test
   public void testFillMemory() {
-    ExecutorInfo stats = new ExecutorInfo();
+    final ExecutorInfo stats = new ExecutorInfo();
     this.statServlet.callFillRemainingMemoryPercent(stats);
     // assume any machine that runs this test should
     // have bash and top available and at least got some remaining memory.
@@ -46,7 +22,7 @@ public class StatisticsServletTest {
 
   @Test
   public void testFillCpu() {
-    ExecutorInfo stats = new ExecutorInfo();
+    final ExecutorInfo stats = new ExecutorInfo();
     this.statServlet.callFillCpuUsage(stats);
     Assert.assertTrue(stats.getCpuUsage() > 0);
   }
@@ -71,11 +47,37 @@ public class StatisticsServletTest {
 
     try {
       Thread.sleep(1000);
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
     }
 
     // make sure cache expires after timeout.
     this.statServlet.callPopulateStatistics();
     Assert.assertNotEquals(updatedTime, this.statServlet.getUpdatedTime());
   }
+
+  private static class MockStatisticsServlet extends ServerStatisticsServlet {
+
+    /** */
+    private static final long serialVersionUID = 1L;
+
+    public ExecutorInfo getStastics() {
+      return cachedstats;
+    }
+
+    public long getUpdatedTime() {
+      return lastRefreshedTime;
+    }
+
+    public void callPopulateStatistics() {
+      this.populateStatistics(false);
+    }
+
+    public void callFillCpuUsage(final ExecutorInfo stats) {
+      this.fillCpuUsage(stats);
+    }
+
+    public void callFillRemainingMemoryPercent(final ExecutorInfo stats) {
+      this.fillRemainingMemoryPercent(stats);
+    }
+  }
 }
diff --git a/azkaban-exec-server/src/test/resources/log4j.properties b/azkaban-exec-server/src/test/resources/log4j.properties
index fbc13fd..36c72cb 100644
--- a/azkaban-exec-server/src/test/resources/log4j.properties
+++ b/azkaban-exec-server/src/test/resources/log4j.properties
@@ -1,5 +1,4 @@
 log4j.rootLogger=INFO, Console
-
 log4j.appender.Console=org.apache.log4j.ConsoleAppender
 log4j.appender.Console.layout=org.apache.log4j.PatternLayout
 log4j.appender.Console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] %m%n
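
The ConversionPattern above determines how every line of test log output is rendered. As an illustration only (this is not part of the commit), the same layout can be exercised programmatically with log4j 1.2; the logger name and message below are placeholders:

import org.apache.log4j.ConsoleAppender;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

public class Log4jPatternDemo {
  public static void main(String[] args) {
    // Same pattern as the test log4j.properties, configured in code instead of a properties file.
    Logger.getRootLogger().addAppender(new ConsoleAppender(
        new PatternLayout("%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] %m%n")));
    // Prints something like: 2017/06/01 15:52:46.123 -0700 INFO [JobRunnerTest] Create temp dir
    Logger.getLogger("JobRunnerTest").info("Create temp dir");
  }
}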
diff --git a/azkaban-hadoop-security-plugin/build.gradle b/azkaban-hadoop-security-plugin/build.gradle
index 9e6ac8a..56f6483 100644
--- a/azkaban-hadoop-security-plugin/build.gradle
+++ b/azkaban-hadoop-security-plugin/build.gradle
@@ -1,12 +1,12 @@
 apply plugin: 'distribution'
 
 dependencies {
-  compile project(":azkaban-common")
+    compile project(":azkaban-common")
 
-  compileOnly "org.apache.hadoop:hadoop-common:$hadoopVersion"
-  compileOnly "org.apache.hadoop:hadoop-mapreduce-client-common:$hadoopVersion"
-  compileOnly "org.apache.hadoop:hadoop-mapreduce-client-core:$hadoopVersion"
-  compileOnly "org.apache.hive:hive-metastore:$hiveVersion"
+    compileOnly "org.apache.hadoop:hadoop-common:$hadoopVersion"
+    compileOnly "org.apache.hadoop:hadoop-mapreduce-client-common:$hadoopVersion"
+    compileOnly "org.apache.hadoop:hadoop-mapreduce-client-core:$hadoopVersion"
+    compileOnly "org.apache.hive:hive-metastore:$hiveVersion"
 }
 
 /**
@@ -17,11 +17,11 @@ dependencies {
  * It is assumed that classpaths of hadoop, hive, pig, etc will be externally fed into the application.
  */
 distributions {
-  main {
-    contents {
-      from(jar) {
-        into 'lib'
-      }
+    main {
+        contents {
+            from(jar) {
+                into 'lib'
+            }
+        }
     }
-  }
 }
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManager.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManager.java
index d6578d2..9273f99 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManager.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManager.java
@@ -16,16 +16,14 @@
 
 package azkaban.security.commons;
 
+import azkaban.utils.Props;
 import java.io.File;
 import java.io.IOException;
 import java.util.Properties;
-
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-
 public abstract class HadoopSecurityManager {
 
   public static final String ENABLE_PROXYING = "azkaban.should.proxy"; // boolean
@@ -42,6 +40,12 @@ public abstract class HadoopSecurityManager {
   public static final String OBTAIN_NAMENODE_TOKEN = "obtain.namenode.token";
   public static final String OBTAIN_HCAT_TOKEN = "obtain.hcat.token";
 
+  public static boolean shouldProxy(final Properties prop) {
+    final String shouldProxy = prop.getProperty(ENABLE_PROXYING);
+
+    return shouldProxy != null && shouldProxy.equals("true");
+  }
+
   public boolean isHadoopSecurityEnabled()
       throws HadoopSecurityManagerException {
     return false;
@@ -68,12 +72,6 @@ public abstract class HadoopSecurityManager {
   public abstract FileSystem getFSAsUser(String user)
       throws HadoopSecurityManagerException;
 
-  public static boolean shouldProxy(Properties prop) {
-    String shouldProxy = prop.getProperty(ENABLE_PROXYING);
-
-    return shouldProxy != null && shouldProxy.equals("true");
-  }
-
   public abstract void prefetchToken(File tokenFile, String userToProxy,
       Logger logger) throws HadoopSecurityManagerException;
 
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManagerException.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManagerException.java
index 5b55d5f..304be07 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManagerException.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/HadoopSecurityManagerException.java
@@ -17,13 +17,14 @@
 package azkaban.security.commons;
 
 public class HadoopSecurityManagerException extends Exception {
+
   private static final long serialVersionUID = 1L;
 
-  public HadoopSecurityManagerException(String message) {
+  public HadoopSecurityManagerException(final String message) {
     super(message);
   }
 
-  public HadoopSecurityManagerException(String message, Throwable cause) {
+  public HadoopSecurityManagerException(final String message, final Throwable cause) {
     super(message, cause);
   }
 }
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/SecurityUtils.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/SecurityUtils.java
index ef4cc35..86882a4 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/SecurityUtils.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/commons/SecurityUtils.java
@@ -16,6 +16,13 @@
 
 package azkaban.security.commons;
 
+import azkaban.utils.Props;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.Text;
@@ -27,22 +34,16 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.log4j.Logger;
 
-import azkaban.utils.Props;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.Properties;
-
 public class SecurityUtils {
+
   // Secure Hadoop proxy user params
   public static final String ENABLE_PROXYING = "azkaban.should.proxy"; // boolean
   public static final String PROXY_KEYTAB_LOCATION = "proxy.keytab.location";
   public static final String PROXY_USER = "proxy.user";
   public static final String TO_PROXY = "user.to.proxy";
-
+  public static final String OBTAIN_BINARY_TOKEN = "obtain.binary.token";
+  public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY =
+      "mapreduce.job.credentials.binary";
   private static UserGroupInformation loginUser = null;
 
   /**
@@ -50,7 +51,7 @@ public class SecurityUtils {
    * parameters necessary from properties file.
    */
   public static synchronized UserGroupInformation getProxiedUser(
-      String toProxy, Properties prop, Logger log, Configuration conf)
+      final String toProxy, final Properties prop, final Logger log, final Configuration conf)
       throws IOException {
 
     if (conf == null) {
@@ -64,8 +65,8 @@ public class SecurityUtils {
 
     if (loginUser == null) {
       log.info("No login user. Creating login user");
-      String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
-      String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
+      final String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
+      final String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
       UserGroupInformation.loginUserFromKeytab(proxyUser, keytab);
       loginUser = UserGroupInformation.getLoginUser();
       log.info("Logged in with user " + loginUser);
@@ -81,39 +82,37 @@ public class SecurityUtils {
    * Create a proxied user, taking all parameters, including which user to proxy
    * from provided Properties.
    */
-  public static UserGroupInformation getProxiedUser(Properties prop,
-      Logger log, Configuration conf) throws IOException {
-    String toProxy = verifySecureProperty(prop, TO_PROXY, log);
-    UserGroupInformation user = getProxiedUser(toProxy, prop, log, conf);
-    if (user == null)
+  public static UserGroupInformation getProxiedUser(final Properties prop,
+      final Logger log, final Configuration conf) throws IOException {
+    final String toProxy = verifySecureProperty(prop, TO_PROXY, log);
+    final UserGroupInformation user = getProxiedUser(toProxy, prop, log, conf);
+    if (user == null) {
       throw new IOException(
           "Proxy as any user in unsecured grid is not supported!"
               + prop.toString());
+    }
     log.info("created proxy user for " + user.getUserName() + user.toString());
     return user;
   }
 
-  public static String verifySecureProperty(Properties properties, String s,
-      Logger l) throws IOException {
-    String value = properties.getProperty(s);
+  public static String verifySecureProperty(final Properties properties, final String s,
+      final Logger l) throws IOException {
+    final String value = properties.getProperty(s);
 
-    if (value == null)
+    if (value == null) {
       throw new IOException(s
           + " not set in properties. Cannot use secure proxy");
+    }
     l.info("Secure proxy configuration: Property " + s + " = " + value);
     return value;
   }
 
-  public static boolean shouldProxy(Properties prop) {
-    String shouldProxy = prop.getProperty(ENABLE_PROXYING);
+  public static boolean shouldProxy(final Properties prop) {
+    final String shouldProxy = prop.getProperty(ENABLE_PROXYING);
 
     return shouldProxy != null && shouldProxy.equals("true");
   }
 
-  public static final String OBTAIN_BINARY_TOKEN = "obtain.binary.token";
-  public static final String MAPREDUCE_JOB_CREDENTIALS_BINARY =
-      "mapreduce.job.credentials.binary";
-
   public static synchronized void prefetchToken(final File tokenFile,
       final Props p, final Logger logger) throws InterruptedException,
       IOException {
@@ -130,24 +129,24 @@ public class SecurityUtils {
             return null;
           }
 
-          private void getToken(Props p) throws InterruptedException,
+          private void getToken(final Props p) throws InterruptedException,
               IOException {
-            String shouldPrefetch = p.getString(OBTAIN_BINARY_TOKEN);
+            final String shouldPrefetch = p.getString(OBTAIN_BINARY_TOKEN);
             if (shouldPrefetch != null && shouldPrefetch.equals("true")) {
               logger.info("Pre-fetching token");
 
               logger.info("Pre-fetching fs token");
-              FileSystem fs = FileSystem.get(conf);
-              Token<?> fsToken =
+              final FileSystem fs = FileSystem.get(conf);
+              final Token<?> fsToken =
                   fs.getDelegationToken(p.getString("user.to.proxy"));
               logger.info("Created token: " + fsToken.toString());
 
-              Job job =
+              final Job job =
                   new Job(conf, "totally phony, extremely fake, not real job");
-              JobConf jc = new JobConf(conf);
-              JobClient jobClient = new JobClient(jc);
+              final JobConf jc = new JobConf(conf);
+              final JobClient jobClient = new JobClient(jc);
               logger.info("Pre-fetching job token: Got new JobClient: " + jc);
-              Token<DelegationTokenIdentifier> mrdt =
+              final Token<DelegationTokenIdentifier> mrdt =
                   jobClient.getDelegationToken(new Text("hi"));
               logger.info("Created token: " + mrdt.toString());
 
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java
index d0aea33..f29c073 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java
@@ -15,19 +15,18 @@
  */
 package azkaban.security;
 
-import org.apache.log4j.Logger;
-
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.log4j.Logger;
 
 /**
- * This is a wrapper over the binary executable execute-as-user. It provides a simple API to run commands as
- * another user while abstracting away the process logic, commandline handling, etc.
- *
+ * This is a wrapper over the binary executable execute-as-user. It provides a simple API to run
+ * commands as another user while abstracting away the process logic, commandline handling, etc.
  */
 public class ExecuteAsUser {
+
   private final static Logger log = Logger.getLogger(ExecuteAsUser.class);
   private final static String EXECUTE_AS_USER = "execute-as-user";
 
@@ -44,9 +43,9 @@ public class ExecuteAsUser {
   }
 
   private void validate() {
-    if (!binaryExecutable.canExecute()) {
+    if (!this.binaryExecutable.canExecute()) {
       throw new RuntimeException("Unable to execute execute-as-user binary. Invalid Path: "
-          + binaryExecutable.getAbsolutePath());
+          + this.binaryExecutable.getAbsolutePath());
     }
   }
 
@@ -56,11 +55,10 @@ public class ExecuteAsUser {
    * @param user The proxy user
    * @param command the list containing the program and its arguments
    * @return The return value of the shell command
-   * @throws IOException
    */
   public int execute(final String user, final List<String> command) throws IOException {
     log.info("Command: " + command);
-    Process process = new ProcessBuilder()
+    final Process process = new ProcessBuilder()
         .command(constructExecuteAsCommand(user, command))
         .inheritIO()
         .start();
@@ -68,16 +66,16 @@ public class ExecuteAsUser {
     int exitCode;
     try {
       exitCode = process.waitFor();
-    } catch (InterruptedException e) {
+    } catch (final InterruptedException e) {
       log.error(e.getMessage(), e);
       exitCode = 1;
     }
     return exitCode;
   }
 
-  private List<String> constructExecuteAsCommand(String user, List<String> command) {
-    List<String> commandList = new ArrayList<>();
-    commandList.add(binaryExecutable.getAbsolutePath());
+  private List<String> constructExecuteAsCommand(final String user, final List<String> command) {
+    final List<String> commandList = new ArrayList<>();
+    commandList.add(this.binaryExecutable.getAbsolutePath());
     commandList.add(user);
     commandList.addAll(command);
     return commandList;
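
As the class comment notes, ExecuteAsUser only prepends the execute-as-user binary and the target user to the supplied command, runs it with inherited I/O, and returns the process exit code. A hedged usage sketch, with the native-library directory and user name as placeholders (HadoopSecurityManager_H_2_0 passes the azkaban.native.lib directory here):

import azkaban.security.ExecuteAsUser;
import java.util.Arrays;

public class ExecuteAsUserSketch {
  public static void main(String[] args) throws Exception {
    // Directory assumed to contain the compiled execute-as-user binary.
    final ExecuteAsUser executeAsUser = new ExecuteAsUser("/opt/azkaban/native-lib");

    // Roughly equivalent to running: <native-lib>/execute-as-user some_user id -a
    final int exitCode = executeAsUser.execute("some_user", Arrays.asList("id", "-a"));
    System.out.println("execute-as-user exit code: " + exitCode);
  }
}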
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
index bcaaa91..a756aee 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
@@ -16,6 +16,25 @@
 
 package azkaban.security;
 
+import azkaban.security.commons.HadoopSecurityManager;
+import azkaban.security.commons.HadoopSecurityManagerException;
+import azkaban.utils.Props;
+import azkaban.utils.UndefinedPropertyException;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.security.PrivilegedAction;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -50,110 +69,80 @@ import org.apache.hadoop.yarn.util.Records;
 import org.apache.log4j.Logger;
 import org.apache.thrift.TException;
 
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.security.PrivilegedAction;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
-import azkaban.security.commons.HadoopSecurityManager;
-import azkaban.security.commons.HadoopSecurityManagerException;
-import azkaban.utils.Props;
-import azkaban.utils.UndefinedPropertyException;
-
 public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
   /**
    * TODO Remove duplicated constants from plugins.
    *
-   * Azkaban plugins don't depend on a common submodule from which they both can inherit code. Thus, constants are
-   * copied around and any changes to the constant values will break Azkaban. This needs to be fixed as part of a
-   * plugin infrastructure implementation.
+   * Azkaban plugins don't depend on a common submodule from which they both can inherit code. Thus,
+   * constants are copied around and any changes to the constant values will break Azkaban. This
+   * needs to be fixed as part of a plugin infrastructure implementation.
    */
   public static final String NATIVE_LIB_FOLDER = "azkaban.native.lib";
 
   /**
    * TODO: This should be exposed as a configurable parameter
    *
-   * The assumption is that an "azkaban" group exists which has access to data created by the azkaban process. For
-   * example, this may include delegation tokens created for other users to run their jobs.
+   * The assumption is that an "azkaban" group exists which has access to data created by the
+   * azkaban process. For example, this may include delegation tokens created for other users to run
+   * their jobs.
    */
   public static final String GROUP_NAME = "azkaban";
-
-  private static final String FS_HDFS_IMPL_DISABLE_CACHE =
-      "fs.hdfs.impl.disable.cache";
-
-  /** The Kerberos principal for the job tracker. */
+  /**
+   * The Kerberos principal for the job tracker.
+   */
   public static final String JT_PRINCIPAL = JTConfig.JT_USER_NAME;
-  // "mapreduce.jobtracker.kerberos.principal";
-  /** The Kerberos principal for the resource manager. */
+  /**
+   * The Kerberos principal for the resource manager.
+   */
   public static final String RM_PRINCIPAL = "yarn.resourcemanager.principal";
-
+  // "mapreduce.jobtracker.kerberos.principal";
   public static final String HADOOP_JOB_TRACKER = "mapred.job.tracker";
   public static final String HADOOP_JOB_TRACKER_2 =
       "mapreduce.jobtracker.address";
   public static final String HADOOP_YARN_RM = "yarn.resourcemanager.address";
-
+  /**
+   * the key that will be used to set proper signature for each of the hcat
+   * token when multiple hcat tokens are required to be fetched.
+   */
+  public static final String HIVE_TOKEN_SIGNATURE_KEY =
+      "hive.metastore.token.signature";
+  public static final Text DEFAULT_RENEWER = new Text("azkaban mr tokens");
+  public static final String CHOWN = "chown";
+  public static final String CHMOD = "chmod";
+  // The file permissions assigned to a Delegation token file on fetch
+  public static final String TOKEN_FILE_PERMISSIONS = "460";
+  private static final String FS_HDFS_IMPL_DISABLE_CACHE =
+      "fs.hdfs.impl.disable.cache";
   private static final String OTHER_NAMENODES_TO_GET_TOKEN = "other_namenodes";
-
   /**
    * the settings to be defined by user indicating if there are hcat locations
    * other than the default one the system should pre-fetch hcat token from.
    * Note: Multiple thrift uris are supported, use comma to separate the values,
    * values are case insensitive.
-   * */
+   */
   private static final String EXTRA_HCAT_LOCATION = "other_hcat_location";
-
-  /**
-   * the key that will be used to set proper signature for each of the hcat
-   * token when multiple hcat tokens are required to be fetched.
-   * */
-  public static final String HIVE_TOKEN_SIGNATURE_KEY =
-      "hive.metastore.token.signature";
-
-  public static final Text DEFAULT_RENEWER = new Text("azkaban mr tokens");
-
   private static final String AZKABAN_KEYTAB_LOCATION = "proxy.keytab.location";
   private static final String AZKABAN_PRINCIPAL = "proxy.user";
   private static final String OBTAIN_JOBHISTORYSERVER_TOKEN =
       "obtain.jobhistoryserver.token";
-  public static final String CHOWN = "chown";
-  public static final String CHMOD = "chmod";
-
-  // The file permissions assigned to a Delegation token file on fetch
-  public static final String TOKEN_FILE_PERMISSIONS = "460";
-
-  private UserGroupInformation loginUser = null;
   private final static Logger logger = Logger
       .getLogger(HadoopSecurityManager_H_2_0.class);
-  private Configuration conf;
-
-  private String keytabLocation;
-  private String keytabPrincipal;
-  private boolean shouldProxy = false;
-  private boolean securityEnabled = false;
-
   private static HadoopSecurityManager hsmInstance = null;
-  private ConcurrentMap<String, UserGroupInformation> userUgiMap;
-
   private static URLClassLoader ucl;
-
   private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
   private final ExecuteAsUser executeAsUser;
+  private final Configuration conf;
+  private final ConcurrentMap<String, UserGroupInformation> userUgiMap;
+  private UserGroupInformation loginUser = null;
+  private String keytabLocation;
+  private String keytabPrincipal;
+  private boolean shouldProxy = false;
+  private boolean securityEnabled = false;
 
-  private HadoopSecurityManager_H_2_0(Props props)
+  private HadoopSecurityManager_H_2_0(final Props props)
       throws HadoopSecurityManagerException, IOException {
-    executeAsUser = new ExecuteAsUser(props.getString(NATIVE_LIB_FOLDER));
+    this.executeAsUser = new ExecuteAsUser(props.getString(NATIVE_LIB_FOLDER));
 
     // for now, assume the same/compatible native library, the same/compatible
     // hadoop-core jar
@@ -167,7 +156,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
       hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
     }
 
-    List<URL> resources = new ArrayList<URL>();
+    final List<URL> resources = new ArrayList<>();
     URL urlToHadoop = null;
     if (hadoopConfDir != null) {
       urlToHadoop = new File(hadoopConfDir).toURI().toURL();
@@ -183,65 +172,65 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
     ucl = new URLClassLoader(resources.toArray(new URL[resources.size()]));
 
-    conf = new Configuration();
-    conf.setClassLoader(ucl);
+    this.conf = new Configuration();
+    this.conf.setClassLoader(ucl);
 
     if (props.containsKey(FS_HDFS_IMPL_DISABLE_CACHE)) {
       logger.info("Setting " + FS_HDFS_IMPL_DISABLE_CACHE + " to "
           + props.get(FS_HDFS_IMPL_DISABLE_CACHE));
-      conf.setBoolean(FS_HDFS_IMPL_DISABLE_CACHE,
+      this.conf.setBoolean(FS_HDFS_IMPL_DISABLE_CACHE,
           Boolean.valueOf(props.get(FS_HDFS_IMPL_DISABLE_CACHE)));
     }
 
     logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION + ": "
-        + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
+        + this.conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION));
     logger.info(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION + ":  "
-        + conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION));
+        + this.conf.get(CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION));
     logger.info(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY + ": "
-        + conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));
+        + this.conf.get(CommonConfigurationKeys.FS_DEFAULT_NAME_KEY));
 
-    UserGroupInformation.setConfiguration(conf);
+    UserGroupInformation.setConfiguration(this.conf);
 
-    securityEnabled = UserGroupInformation.isSecurityEnabled();
-    if (securityEnabled) {
+    this.securityEnabled = UserGroupInformation.isSecurityEnabled();
+    if (this.securityEnabled) {
       logger.info("The Hadoop cluster has enabled security");
-      shouldProxy = true;
+      this.shouldProxy = true;
       try {
 
-        keytabLocation = props.getString(AZKABAN_KEYTAB_LOCATION);
-        keytabPrincipal = props.getString(AZKABAN_PRINCIPAL);
-      } catch (UndefinedPropertyException e) {
+        this.keytabLocation = props.getString(AZKABAN_KEYTAB_LOCATION);
+        this.keytabPrincipal = props.getString(AZKABAN_PRINCIPAL);
+      } catch (final UndefinedPropertyException e) {
         throw new HadoopSecurityManagerException(e.getMessage());
       }
 
       // try login
       try {
-        if (loginUser == null) {
+        if (this.loginUser == null) {
           logger.info("No login user. Creating login user");
-          logger.info("Using principal from " + keytabPrincipal + " and "
-              + keytabLocation);
-          UserGroupInformation.loginUserFromKeytab(keytabPrincipal,
-              keytabLocation);
-          loginUser = UserGroupInformation.getLoginUser();
-          logger.info("Logged in with user " + loginUser);
+          logger.info("Using principal from " + this.keytabPrincipal + " and "
+              + this.keytabLocation);
+          UserGroupInformation.loginUserFromKeytab(this.keytabPrincipal,
+              this.keytabLocation);
+          this.loginUser = UserGroupInformation.getLoginUser();
+          logger.info("Logged in with user " + this.loginUser);
         } else {
-          logger.info("loginUser (" + loginUser
+          logger.info("loginUser (" + this.loginUser
               + ") already created, refreshing tgt.");
-          loginUser.checkTGTAndReloginFromKeytab();
+          this.loginUser.checkTGTAndReloginFromKeytab();
         }
-      } catch (IOException e) {
+      } catch (final IOException e) {
         throw new HadoopSecurityManagerException(
             "Failed to login with kerberos ", e);
       }
 
     }
 
-    userUgiMap = new ConcurrentHashMap<String, UserGroupInformation>();
+    this.userUgiMap = new ConcurrentHashMap<>();
 
     logger.info("Hadoop Security Manager initialized");
   }
 
-  public static HadoopSecurityManager getInstance(Props props)
+  public static HadoopSecurityManager getInstance(final Props props)
       throws HadoopSecurityManagerException, IOException {
     if (hsmInstance == null) {
       synchronized (HadoopSecurityManager_H_2_0.class) {
@@ -261,34 +250,32 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   /**
    * Create a proxied user based on the explicit user name, taking other
    * parameters necessary from properties file.
-   *
-   * @throws IOException
    */
   @Override
-  public synchronized UserGroupInformation getProxiedUser(String userToProxy)
+  public synchronized UserGroupInformation getProxiedUser(final String userToProxy)
       throws HadoopSecurityManagerException {
 
     if (userToProxy == null) {
       throw new HadoopSecurityManagerException("userToProxy can't be null");
     }
 
-    UserGroupInformation ugi = userUgiMap.get(userToProxy);
+    UserGroupInformation ugi = this.userUgiMap.get(userToProxy);
     if (ugi == null) {
       logger.info("proxy user " + userToProxy
           + " not exist. Creating new proxy user");
-      if (shouldProxy) {
+      if (this.shouldProxy) {
         try {
           ugi =
               UserGroupInformation.createProxyUser(userToProxy,
                   UserGroupInformation.getLoginUser());
-        } catch (IOException e) {
+        } catch (final IOException e) {
           throw new HadoopSecurityManagerException(
               "Failed to create proxy user", e);
         }
       } else {
         ugi = UserGroupInformation.createRemoteUser(userToProxy);
       }
-      userUgiMap.putIfAbsent(userToProxy, ugi);
+      this.userUgiMap.putIfAbsent(userToProxy, ugi);
     }
     return ugi;
   }
@@ -298,10 +285,10 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
    * from provided Properties.
    */
   @Override
-  public UserGroupInformation getProxiedUser(Props userProp)
+  public UserGroupInformation getProxiedUser(final Props userProp)
       throws HadoopSecurityManagerException {
-    String userToProxy = verifySecureProperty(userProp, USER_TO_PROXY);
-    UserGroupInformation user = getProxiedUser(userToProxy);
+    final String userToProxy = verifySecureProperty(userProp, USER_TO_PROXY);
+    final UserGroupInformation user = getProxiedUser(userToProxy);
     if (user == null) {
       throw new HadoopSecurityManagerException(
           "Proxy as any user in unsecured grid is not supported!");
@@ -309,9 +296,9 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
     return user;
   }
 
-  public String verifySecureProperty(Props props, String s)
+  public String verifySecureProperty(final Props props, final String s)
       throws HadoopSecurityManagerException {
-    String value = props.getString(s);
+    final String value = props.getString(s);
     if (value == null) {
       throw new HadoopSecurityManagerException(s + " not set in properties.");
     }
@@ -319,12 +306,12 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   }
 
   @Override
-  public FileSystem getFSAsUser(String user)
+  public FileSystem getFSAsUser(final String user)
       throws HadoopSecurityManagerException {
-    FileSystem fs;
+    final FileSystem fs;
     try {
       logger.info("Getting file system as " + user);
-      UserGroupInformation ugi = getProxiedUser(user);
+      final UserGroupInformation ugi = getProxiedUser(user);
 
       if (ugi != null) {
         fs = ugi.doAs(new PrivilegedAction<FileSystem>() {
@@ -332,28 +319,28 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           @Override
           public FileSystem run() {
             try {
-              return FileSystem.get(conf);
-            } catch (IOException e) {
+              return FileSystem.get(HadoopSecurityManager_H_2_0.this.conf);
+            } catch (final IOException e) {
               throw new RuntimeException(e);
             }
           }
         });
       } else {
-        fs = FileSystem.get(conf);
+        fs = FileSystem.get(this.conf);
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to get FileSystem. ", e);
     }
     return fs;
   }
 
   public boolean shouldProxy() {
-    return shouldProxy;
+    return this.shouldProxy;
   }
 
   @Override
   public boolean isHadoopSecurityEnabled() {
-    return securityEnabled;
+    return this.securityEnabled;
   }
 
   /*
@@ -374,14 +361,14 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           return null;
         }
 
-        private void getToken(String userToProxy) throws InterruptedException,
+        private void getToken(final String userToProxy) throws InterruptedException,
             IOException, HadoopSecurityManagerException {
 
-          FileSystem fs = FileSystem.get(conf);
+          final FileSystem fs = FileSystem.get(HadoopSecurityManager_H_2_0.this.conf);
           // check if we get the correct FS, and most importantly, the conf
           logger.info("Getting DFS token from " + fs.getCanonicalServiceName()
               + fs.getUri());
-          Token<?> fsToken = fs.getDelegationToken(userToProxy);
+          final Token<?> fsToken = fs.getDelegationToken(userToProxy);
           if (fsToken == null) {
             logger.error("Failed to fetch DFS token for ");
             throw new HadoopSecurityManagerException(
@@ -391,11 +378,11 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           logger.info("Token kind: " + fsToken.getKind());
           logger.info("Token service: " + fsToken.getService());
 
-          JobConf jc = new JobConf(conf);
-          JobClient jobClient = new JobClient(jc);
+          final JobConf jc = new JobConf(HadoopSecurityManager_H_2_0.this.conf);
+          final JobClient jobClient = new JobClient(jc);
           logger.info("Pre-fetching JT token: Got new JobClient: " + jc);
 
-          Token<DelegationTokenIdentifier> mrdt =
+          final Token<DelegationTokenIdentifier> mrdt =
               jobClient.getDelegationToken(new Text("mr token"));
           if (mrdt == null) {
             logger.error("Failed to fetch JT token for ");
@@ -414,13 +401,14 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
         }
       });
-    } catch (Exception e) {
-      throw new HadoopSecurityManagerException("Failed to get hadoop tokens! " + e.getMessage() + e.getCause());
+    } catch (final Exception e) {
+      throw new HadoopSecurityManagerException(
+          "Failed to get hadoop tokens! " + e.getMessage() + e.getCause());
     }
   }
 
   private void cancelNameNodeToken(final Token<? extends TokenIdentifier> t,
-      String userToProxy) throws HadoopSecurityManagerException {
+      final String userToProxy) throws HadoopSecurityManagerException {
     try {
       getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
         @Override
@@ -429,50 +417,49 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           return null;
         }
 
-        private void cancelToken(Token<?> nt) throws IOException,
+        private void cancelToken(final Token<?> nt) throws IOException,
             InterruptedException {
-          nt.cancel(conf);
+          nt.cancel(HadoopSecurityManager_H_2_0.this.conf);
         }
       });
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to cancel token. "
           + e.getMessage() + e.getCause(), e);
     }
   }
 
   private void cancelMRJobTrackerToken(
-      final Token<? extends TokenIdentifier> t, String userToProxy)
+      final Token<? extends TokenIdentifier> t, final String userToProxy)
       throws HadoopSecurityManagerException {
     try {
       getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
-        @SuppressWarnings("unchecked")
         @Override
         public Void run() throws Exception {
           cancelToken((Token<DelegationTokenIdentifier>) t);
           return null;
         }
 
-        private void cancelToken(Token<DelegationTokenIdentifier> jt)
+        private void cancelToken(final Token<DelegationTokenIdentifier> jt)
             throws IOException, InterruptedException {
-          JobConf jc = new JobConf(conf);
-          JobClient jobClient = new JobClient(jc);
+          final JobConf jc = new JobConf(HadoopSecurityManager_H_2_0.this.conf);
+          final JobClient jobClient = new JobClient(jc);
           jobClient.cancelDelegationToken(jt);
         }
       });
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to cancel token. "
           + e.getMessage() + e.getCause(), e);
     }
   }
 
   private void cancelJhsToken(final Token<? extends TokenIdentifier> t,
-      String userToProxy) throws HadoopSecurityManagerException {
+      final String userToProxy) throws HadoopSecurityManagerException {
     // it appears yarn would clean up this token after app finish, after a long
     // while though.
-    org.apache.hadoop.yarn.api.records.Token token =
+    final org.apache.hadoop.yarn.api.records.Token token =
         org.apache.hadoop.yarn.api.records.Token.newInstance(t.getIdentifier(),
             t.getKind().toString(), t.getPassword(), t.getService().toString());
-    final YarnRPC rpc = YarnRPC.create(conf);
+    final YarnRPC rpc = YarnRPC.create(this.conf);
     final InetSocketAddress jhsAddress = SecurityUtil.getTokenServiceAddr(t);
     MRClientProtocol jhsProxy = null;
     try {
@@ -482,14 +469,14 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
                 @Override
                 public MRClientProtocol run() {
                   return (MRClientProtocol) rpc.getProxy(
-                      HSClientProtocol.class, jhsAddress, conf);
+                      HSClientProtocol.class, jhsAddress, HadoopSecurityManager_H_2_0.this.conf);
                 }
               });
-      CancelDelegationTokenRequest request =
+      final CancelDelegationTokenRequest request =
           Records.newRecord(CancelDelegationTokenRequest.class);
       request.setDelegationToken(token);
       jhsProxy.cancelDelegationToken(request);
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to cancel token. "
           + e.getMessage() + e.getCause(), e);
     } finally {
@@ -499,19 +486,19 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   }
 
   private void cancelHiveToken(final Token<? extends TokenIdentifier> t,
-      String userToProxy) throws HadoopSecurityManagerException {
+      final String userToProxy) throws HadoopSecurityManagerException {
     try {
-      HiveConf hiveConf = new HiveConf();
-      HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
+      final HiveConf hiveConf = new HiveConf();
+      final HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
       hiveClient.cancelDelegationToken(t.encodeToUrlString());
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to cancel Token. "
           + e.getMessage() + e.getCause(), e);
     }
   }
 
   @Override
-  public void cancelTokens(File tokenFile, String userToProxy, Logger logger)
+  public void cancelTokens(final File tokenFile, final String userToProxy, final Logger logger)
       throws HadoopSecurityManagerException {
     // nntoken
     Credentials cred = null;
@@ -519,7 +506,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
       cred =
           Credentials.readTokenStorageFile(new Path(tokenFile.toURI()),
               new Configuration());
-      for (Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
+      for (final Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
         logger.info("Got token.");
         logger.info("Token kind: " + t.getKind());
         logger.info("Token service: " + t.getService());
@@ -540,7 +527,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           logger.info("unknown token type " + t.getKind());
         }
       }
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to cancel tokens "
           + e.getMessage() + e.getCause(), e);
     }
@@ -551,20 +538,12 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   * Function to fetch an hcat token as per the specified hive configuration and
   * then store the token into the specified credential store.
    *
-   * @param userToProxy String value indicating the name of the user the token
-   *          will be fetched for.
-   * @param hiveConf the configuration based off which the hive client will be
-   *          initialized.
-   * @param logger the logger instance which writes the logging content to the
-   *          job logs.
-   *
-   * @throws IOException
-   * @throws TException
-   * @throws MetaException
-   *
-   * */
-  private Token<DelegationTokenIdentifier> fetchHcatToken(String userToProxy,
-      HiveConf hiveConf, String tokenSignatureOverwrite, final Logger logger)
+   * @param userToProxy String value indicating the name of the user the token will be fetched for.
+   * @param hiveConf the configuration based off which the hive client will be initialized.
+   * @param logger the logger instance which writes the logging content to the job logs.
+   */
+  private Token<DelegationTokenIdentifier> fetchHcatToken(final String userToProxy,
+      final HiveConf hiveConf, final String tokenSignatureOverwrite, final Logger logger)
       throws IOException, MetaException, TException {
 
     logger.info(HiveConf.ConfVars.METASTOREURIS.varname + ": "
@@ -576,12 +555,12 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
     logger.info(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname + ": "
         + hiveConf.get(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname));
 
-    HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
-    String hcatTokenStr =
+    final HiveMetaStoreClient hiveClient = new HiveMetaStoreClient(hiveConf);
+    final String hcatTokenStr =
         hiveClient.getDelegationToken(userToProxy, UserGroupInformation
             .getLoginUser().getShortUserName());
-    Token<DelegationTokenIdentifier> hcatToken =
-        new Token<DelegationTokenIdentifier>();
+    final Token<DelegationTokenIdentifier> hcatToken =
+        new Token<>();
     hcatToken.decodeFromUrlString(hcatTokenStr);
 
     // overwrite the value of the service property of the token if the signature
@@ -635,7 +614,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           logger.info("Need to pre-fetch extra metaStore tokens from hive.");
 
           // start to process the user inputs.
-          for (String thriftUrl : extraHcatLocations) {
+          for (final String thriftUrl : extraHcatLocations) {
             logger.info("Pre-fetching metaStore token from : " + thriftUrl);
 
             hiveConf = new HiveConf();
@@ -647,8 +626,8 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
         }
 
-      } catch (Throwable t) {
-        String message =
+      } catch (final Throwable t) {
+        final String message =
             "Failed to get hive metastore token." + t.getMessage()
                 + t.getCause();
         logger.error(message, t);
@@ -657,19 +636,19 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
     }
 
     if (props.getBoolean(OBTAIN_JOBHISTORYSERVER_TOKEN, false)) {
-      YarnRPC rpc = YarnRPC.create(conf);
-      final String serviceAddr = conf.get(JHAdminConfig.MR_HISTORY_ADDRESS);
+      final YarnRPC rpc = YarnRPC.create(this.conf);
+      final String serviceAddr = this.conf.get(JHAdminConfig.MR_HISTORY_ADDRESS);
 
       logger.debug("Connecting to HistoryServer at: " + serviceAddr);
-      HSClientProtocol hsProxy =
+      final HSClientProtocol hsProxy =
           (HSClientProtocol) rpc.getProxy(HSClientProtocol.class,
-              NetUtils.createSocketAddr(serviceAddr), conf);
+              NetUtils.createSocketAddr(serviceAddr), this.conf);
       logger.info("Pre-fetching JH token from job history server");
 
       Token<?> jhsdt = null;
       try {
         jhsdt = getDelegationTokenFromHS(hsProxy);
-      } catch (Exception e) {
+      } catch (final Exception e) {
         logger.error("Failed to fetch JH token", e);
         throw new HadoopSecurityManagerException(
             "Failed to fetch JH token for " + userToProxy);
@@ -696,16 +675,16 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           return null;
         }
 
-        private void getToken(String userToProxy) throws InterruptedException,
+        private void getToken(final String userToProxy) throws InterruptedException,
             IOException, HadoopSecurityManagerException {
           logger.info("Here is the props for " + OBTAIN_NAMENODE_TOKEN + ": "
               + props.getBoolean(OBTAIN_NAMENODE_TOKEN));
           if (props.getBoolean(OBTAIN_NAMENODE_TOKEN, false)) {
-            FileSystem fs = FileSystem.get(conf);
+            final FileSystem fs = FileSystem.get(HadoopSecurityManager_H_2_0.this.conf);
             // check if we get the correct FS, and most importantly, the
             // conf
             logger.info("Getting DFS token from " + fs.getUri());
-            Token<?> fsToken =
+            final Token<?> fsToken =
                 fs.getDelegationToken(getMRTokenRenewerInternal(new JobConf())
                     .toString());
             if (fsToken == null) {
@@ -720,16 +699,16 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
             cred.addToken(fsToken.getService(), fsToken);
 
             // getting additional name nodes tokens
-            String otherNamenodes = props.get(OTHER_NAMENODES_TO_GET_TOKEN);
+            final String otherNamenodes = props.get(OTHER_NAMENODES_TO_GET_TOKEN);
             if ((otherNamenodes != null) && (otherNamenodes.length() > 0)) {
               logger.info(OTHER_NAMENODES_TO_GET_TOKEN + ": '" + otherNamenodes
                   + "'");
-              String[] nameNodeArr = otherNamenodes.split(",");
-              Path[] ps = new Path[nameNodeArr.length];
+              final String[] nameNodeArr = otherNamenodes.split(",");
+              final Path[] ps = new Path[nameNodeArr.length];
               for (int i = 0; i < ps.length; i++) {
                 ps[i] = new Path(nameNodeArr[i].trim());
               }
-              TokenCache.obtainTokensForNamenodes(cred, ps, conf);
+              TokenCache.obtainTokensForNamenodes(cred, ps, HadoopSecurityManager_H_2_0.this.conf);
               logger.info("Successfully fetched tokens for: " + otherNamenodes);
             } else {
               logger.info(OTHER_NAMENODES_TO_GET_TOKEN + " was not configured");
@@ -737,11 +716,11 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           }
 
           if (props.getBoolean(OBTAIN_JOBTRACKER_TOKEN, false)) {
-            JobConf jobConf = new JobConf();
-            JobClient jobClient = new JobClient(jobConf);
+            final JobConf jobConf = new JobConf();
+            final JobClient jobClient = new JobClient(jobConf);
             logger.info("Pre-fetching JT token from JobTracker");
 
-            Token<DelegationTokenIdentifier> mrdt =
+            final Token<DelegationTokenIdentifier> mrdt =
                 jobClient
                     .getDelegationToken(getMRTokenRenewerInternal(jobConf));
             if (mrdt == null) {
@@ -763,10 +742,10 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
       logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
 
-    } catch (Exception e) {
+    } catch (final Exception e) {
       throw new HadoopSecurityManagerException("Failed to get hadoop tokens! "
           + e.getMessage() + e.getCause(), e);
-    } catch (Throwable t) {
+    } catch (final Throwable t) {
       throw new HadoopSecurityManagerException("Failed to get hadoop tokens! "
           + t.getMessage() + t.getCause(), t);
     }
@@ -775,7 +754,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
   /**
    * Prepare token file.
-   *  Writes credentials to a token file and sets appropriate permissions to keep the file secure
+   * Writes credentials to a token file and sets appropriate permissions to keep the file secure
    *
    * @param user user to be proxied
    * @param credentials Credentials to be written to file
@@ -784,20 +763,22 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
    * @throws IOException If there are issues in reading / updating the token file
    */
   private void prepareTokenFile(final String user,
-                                final Credentials credentials,
-                                final File tokenFile,
-                                final Logger logger) throws IOException {
+      final Credentials credentials,
+      final File tokenFile,
+      final Logger logger) throws IOException {
     writeCredentialsToFile(credentials, tokenFile, logger);
     try {
       assignPermissions(user, tokenFile, logger);
-    } catch (IOException e) {
+    } catch (final IOException e) {
      // On any error managing the token file, delete the file
       tokenFile.delete();
       throw e;
     }
   }
 
-  private void writeCredentialsToFile(Credentials credentials, File tokenFile, Logger logger) throws IOException {
+  private void writeCredentialsToFile(final Credentials credentials, final File tokenFile,
+      final Logger logger)
+      throws IOException {
     FileOutputStream fos = null;
     DataOutputStream dos = null;
     try {
@@ -808,7 +789,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
       if (dos != null) {
         try {
           dos.close();
-        } catch (Throwable t) {
+        } catch (final Throwable t) {
           // best effort
           logger.error("encountered exception while closing DataOutputStream of the tokenFile", t);
         }
@@ -829,11 +810,13 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
    * @param tokenFile file to be written
    * @param logger logger to use
    */
-  private void assignPermissions(String user, File tokenFile, Logger logger) throws IOException {
+  private void assignPermissions(final String user, final File tokenFile, final Logger logger)
+      throws IOException {
     final List<String> changePermissionsCommand = Arrays.asList(
         CHMOD, TOKEN_FILE_PERMISSIONS, tokenFile.getAbsolutePath()
     );
-    int result = executeAsUser.execute(System.getProperty("user.name"), changePermissionsCommand);
+    int result = this.executeAsUser
+        .execute(System.getProperty("user.name"), changePermissionsCommand);
     if (result != 0) {
       throw new IOException("Unable to modify permissions. User: " + user);
     }
@@ -841,21 +824,21 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
     final List<String> changeOwnershipCommand = Arrays.asList(
         CHOWN, user + ":" + GROUP_NAME, tokenFile.getAbsolutePath()
     );
-    result = executeAsUser.execute("root", changeOwnershipCommand);
+    result = this.executeAsUser.execute("root", changeOwnershipCommand);
     if (result != 0) {
       throw new IOException("Unable to set ownership. User: " + user);
     }
   }
 
-  private Text getMRTokenRenewerInternal(JobConf jobConf) throws IOException {
+  private Text getMRTokenRenewerInternal(final JobConf jobConf) throws IOException {
     // Taken from Oozie
     //
     // Getting renewer correctly for JT principal also though JT in hadoop
     // 1.x does not have
     // support for renewing/cancelling tokens
-    String servicePrincipal =
+    final String servicePrincipal =
         jobConf.get(RM_PRINCIPAL, jobConf.get(JT_PRINCIPAL));
-    Text renewer;
+    final Text renewer;
     if (servicePrincipal != null) {
       String target =
           jobConf.get(HADOOP_YARN_RM, jobConf.get(HADOOP_JOB_TRACKER_2));
@@ -863,7 +846,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
         target = jobConf.get(HADOOP_JOB_TRACKER);
       }
 
-      String addr = NetUtils.createSocketAddr(target).getHostName();
+      final String addr = NetUtils.createSocketAddr(target).getHostName();
       renewer =
           new Text(SecurityUtil.getServerPrincipal(servicePrincipal, addr));
     } else {
@@ -874,12 +857,12 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
     return renewer;
   }
 
-  private Token<?> getDelegationTokenFromHS(HSClientProtocol hsProxy)
+  private Token<?> getDelegationTokenFromHS(final HSClientProtocol hsProxy)
       throws IOException, InterruptedException {
-    GetDelegationTokenRequest request =
-        recordFactory.newRecordInstance(GetDelegationTokenRequest.class);
-    request.setRenewer(Master.getMasterPrincipal(conf));
-    org.apache.hadoop.yarn.api.records.Token mrDelegationToken;
+    final GetDelegationTokenRequest request =
+        this.recordFactory.newRecordInstance(GetDelegationTokenRequest.class);
+    request.setRenewer(Master.getMasterPrincipal(this.conf));
+    final org.apache.hadoop.yarn.api.records.Token mrDelegationToken;
     mrDelegationToken =
         hsProxy.getDelegationToken(request).getDelegationToken();
     return ConverterUtils.convertFromYarn(mrDelegationToken,
@@ -887,10 +870,10 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   }
 
   private void cancelDelegationTokenFromHS(
-      final org.apache.hadoop.yarn.api.records.Token t, HSClientProtocol hsProxy)
+      final org.apache.hadoop.yarn.api.records.Token t, final HSClientProtocol hsProxy)
       throws IOException, InterruptedException {
-    CancelDelegationTokenRequest request =
-        recordFactory.newRecordInstance(CancelDelegationTokenRequest.class);
+    final CancelDelegationTokenRequest request =
+        this.recordFactory.newRecordInstance(CancelDelegationTokenRequest.class);
     request.setDelegationToken(t);
     hsProxy.cancelDelegationToken(request);
   }
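
For context on the token handling changed above: cancelTokens(...) reads the credentials file with Credentials.readTokenStorageFile and then dispatches on each token's kind. A minimal, standalone sketch of that read-and-inspect step follows; the class and method names here are illustrative and not part of this commit.

import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenFileInspector {

  // Reads a Hadoop credentials file (as written by HadoopSecurityManager_H_2_0) and
  // prints the kind and service of every token it contains.
  public static void dumpTokens(final File tokenFile) throws IOException {
    final Credentials cred =
        Credentials.readTokenStorageFile(new Path(tokenFile.toURI()), new Configuration());
    for (final Token<? extends TokenIdentifier> t : cred.getAllTokens()) {
      System.out.println("kind=" + t.getKind() + " service=" + t.getService());
    }
  }
}
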
diff --git a/azkaban-solo-server/build.gradle b/azkaban-solo-server/build.gradle
index f1bb54b..4b23b2b 100644
--- a/azkaban-solo-server/build.gradle
+++ b/azkaban-solo-server/build.gradle
@@ -1,11 +1,11 @@
 apply plugin: 'distribution'
 
 dependencies {
-  compile(project(':azkaban-web-server'))
-  compile(project(':azkaban-exec-server'))
+    compile(project(':azkaban-web-server'))
+    compile(project(':azkaban-exec-server'))
 
-  runtime('org.slf4j:slf4j-log4j12:1.7.18')
-  runtime('com.h2database:h2:1.4.193')
+    runtime('org.slf4j:slf4j-log4j12:1.7.18')
+    runtime('com.h2database:h2:1.4.193')
 }
 
 installDist.dependsOn ':azkaban-web-server:installDist'
@@ -13,35 +13,35 @@ distTar.dependsOn ':azkaban-web-server:installDist'
 distZip.dependsOn ':azkaban-web-server:installDist'
 
 distributions {
-  main {
-    contents {
-      from('src/main/bash') {
-        into 'bin'
-        fileMode = 0755
-      }
-      from(project(':azkaban-common').files('src/main/bash')) {
-        into 'bin'
-        fileMode = 0755
-      }
-      from ('src/main/resources/conf') {
-        into 'conf'
-      }
-      from ('src/main/resources/commonprivate.properties') {
-        into 'plugins/jobtypes'
-      }
-      from(configurations.runtime) {
-        into 'lib'
-      }
-      from(jar) {
-        into 'lib'
-      }
-      from(project(':azkaban-db').files('src/main/sql')) {
-        into('sql')
-      }
-      // Get the files from sub-dir of distribution of another sub-project
-      from(new File(tasks.getByPath(':azkaban-web-server:installDist').destinationDir, 'web')) {
-        into 'web'
-      }
+    main {
+        contents {
+            from('src/main/bash') {
+                into 'bin'
+                fileMode = 0755
+            }
+            from(project(':azkaban-common').files('src/main/bash')) {
+                into 'bin'
+                fileMode = 0755
+            }
+            from('src/main/resources/conf') {
+                into 'conf'
+            }
+            from('src/main/resources/commonprivate.properties') {
+                into 'plugins/jobtypes'
+            }
+            from(configurations.runtime) {
+                into 'lib'
+            }
+            from(jar) {
+                into 'lib'
+            }
+            from(project(':azkaban-db').files('src/main/sql')) {
+                into('sql')
+            }
+            // Get the files from sub-dir of distribution of another sub-project
+            from(new File(tasks.getByPath(':azkaban-web-server:installDist').destinationDir, 'web')) {
+                into 'web'
+            }
+        }
     }
-  }
 }
diff --git a/azkaban-solo-server/src/main/java/azkaban/soloserver/AzkabanSingleServer.java b/azkaban-solo-server/src/main/java/azkaban/soloserver/AzkabanSingleServer.java
index 7a0c306..3fa95b1 100644
--- a/azkaban-solo-server/src/main/java/azkaban/soloserver/AzkabanSingleServer.java
+++ b/azkaban-solo-server/src/main/java/azkaban/soloserver/AzkabanSingleServer.java
@@ -16,48 +16,41 @@
 
 package azkaban.soloserver;
 
+import static azkaban.ServiceProvider.SERVICE_PROVIDER;
+
 import azkaban.AzkabanCommonModule;
+import azkaban.database.AzkabanDatabaseSetup;
+import azkaban.database.AzkabanDatabaseUpdater;
 import azkaban.execapp.AzkabanExecServerModule;
+import azkaban.execapp.AzkabanExecutorServer;
+import azkaban.server.AzkabanServer;
+import azkaban.utils.Props;
+import azkaban.webapp.AzkabanWebServer;
 import azkaban.webapp.AzkabanWebServerModule;
 import com.google.inject.Guice;
 import com.google.inject.Inject;
 import com.google.inject.Injector;
 import org.apache.log4j.Logger;
 
-import azkaban.database.AzkabanDatabaseSetup;
-import azkaban.database.AzkabanDatabaseUpdater;
-import azkaban.execapp.AzkabanExecutorServer;
-import azkaban.server.AzkabanServer;
-import azkaban.webapp.AzkabanWebServer;
-import azkaban.utils.Props;
-
-import static azkaban.ServiceProvider.*;
-
 
 public class AzkabanSingleServer {
+
   private static final Logger log = Logger.getLogger(AzkabanWebServer.class);
 
   private final AzkabanWebServer webServer;
   private final AzkabanExecutorServer executor;
 
   @Inject
-  public AzkabanSingleServer(AzkabanWebServer webServer, AzkabanExecutorServer executor) {
+  public AzkabanSingleServer(final AzkabanWebServer webServer,
+      final AzkabanExecutorServer executor) {
     this.webServer = webServer;
     this.executor = executor;
   }
 
-  private void launch() throws Exception {
-    AzkabanWebServer.launch(webServer);
-    log.info("Azkaban Web Server started...");
-
-    AzkabanExecutorServer.launch(executor);
-    log.info("Azkaban Exec Server started...");
-  }
-
-  public static void main(String[] args) throws Exception {
+  public static void main(final String[] args) throws Exception {
     log.info("Starting Azkaban Server");
 
-    Props props = AzkabanServer.loadProps(args);
+    final Props props = AzkabanServer.loadProps(args);
     if (props == null) {
       log.error("Properties not found. Need it to connect to the db.");
       log.error("Exiting...");
@@ -65,8 +58,9 @@ public class AzkabanSingleServer {
     }
 
     if (props.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, true)) {
-      boolean updateDB = props.getBoolean(AzkabanDatabaseSetup.DATABASE_AUTO_UPDATE_TABLES, true);
-      String scriptDir = props.getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, "sql");
+      final boolean updateDB = props
+          .getBoolean(AzkabanDatabaseSetup.DATABASE_AUTO_UPDATE_TABLES, true);
+      final String scriptDir = props.getString(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, "sql");
       AzkabanDatabaseUpdater.runDatabaseUpdater(props, scriptDir, updateDB);
     }
 
@@ -81,4 +75,12 @@ public class AzkabanSingleServer {
     /* Launch server */
     injector.getInstance(AzkabanSingleServer.class).launch();
   }
+
+  private void launch() throws Exception {
+    AzkabanWebServer.launch(this.webServer);
+    log.info("Azkaban Web Server started...");
+
+    AzkabanExecutorServer.launch(this.executor);
+    log.info("Azkaban Exec Server started...");
+  }
 }
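
The main(...) method shown above ends by resolving AzkabanSingleServer from a Guice injector and invoking launch(). A rough wiring sketch is below; it assumes AzkabanCommonModule takes the loaded Props and that the web and exec modules are no-arg, which is suggested by the imports but not shown in this hunk.

import azkaban.AzkabanCommonModule;
import azkaban.execapp.AzkabanExecServerModule;
import azkaban.soloserver.AzkabanSingleServer;
import azkaban.utils.Props;
import azkaban.webapp.AzkabanWebServerModule;
import com.google.inject.Guice;
import com.google.inject.Injector;

public class SoloServerWiringSketch {

  // Builds one injector from the three Azkaban modules and resolves the
  // single-server entry point from it; main() then calls its (private) launch().
  static AzkabanSingleServer wire(final Props props) {
    final Injector injector = Guice.createInjector(
        new AzkabanCommonModule(props),   // constructor argument is an assumption
        new AzkabanWebServerModule(),
        new AzkabanExecServerModule());
    return injector.getInstance(AzkabanSingleServer.class);
  }
}
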
diff --git a/azkaban-solo-server/src/main/resources/conf/azkaban.properties b/azkaban-solo-server/src/main/resources/conf/azkaban.properties
index fc4e46f..bac555e 100644
--- a/azkaban-solo-server/src/main/resources/conf/azkaban.properties
+++ b/azkaban-solo-server/src/main/resources/conf/azkaban.properties
@@ -5,43 +5,32 @@ azkaban.color=#FF3601
 azkaban.default.servlet.path=/index
 web.resource.dir=web/
 default.timezone.id=America/Los_Angeles
-
 # Azkaban UserManager class
 user.manager.class=azkaban.user.XmlUserManager
 user.manager.xml.file=conf/azkaban-users.xml
-
 # Loader for projects
 executor.global.properties=conf/global.properties
 azkaban.project.dir=projects
-
 database.type=h2
 h2.path=./h2
 h2.create.tables=true
-
 # Velocity dev mode
 velocity.dev.mode=false
-
 # Azkaban Jetty server properties.
 jetty.use.ssl=false
 jetty.maxThreads=25
 jetty.port=8081
-
 # Azkaban Executor settings
 executor.port=12321
-
 # mail settings
 mail.sender=
 mail.host=
 job.failure.email=
 job.success.email=
-
 lockdown.create.projects=false
-
 cache.directory=cache
-
 # JMX stats
 jetty.connector.stats=true
 executor.connector.stats=true
-
 # Azkaban plugin settings
 azkaban.jobtype.plugin.dir=plugins/jobtypes
diff --git a/azkaban-solo-server/src/main/resources/conf/azkaban-users.xml b/azkaban-solo-server/src/main/resources/conf/azkaban-users.xml
index a13035d..0df8fd1 100644
--- a/azkaban-solo-server/src/main/resources/conf/azkaban-users.xml
+++ b/azkaban-solo-server/src/main/resources/conf/azkaban-users.xml
@@ -1,7 +1,7 @@
 <azkaban-users>
-	<user username="azkaban" password="azkaban" roles="admin" groups="azkaban" />
-	<user username="metrics" password="metrics" roles="metrics"/>
+  <user groups="azkaban" password="azkaban" roles="admin" username="azkaban"/>
+  <user password="metrics" roles="metrics" username="metrics"/>
 
-	<role name="admin" permissions="ADMIN" />
-	<role name="metrics" permissions="METRICS"/>
+  <role name="admin" permissions="ADMIN"/>
+  <role name="metrics" permissions="METRICS"/>
 </azkaban-users>
diff --git a/azkaban-solo-server/src/main/resources/log4j.properties b/azkaban-solo-server/src/main/resources/log4j.properties
index 705b927..2db62b2 100644
--- a/azkaban-solo-server/src/main/resources/log4j.properties
+++ b/azkaban-solo-server/src/main/resources/log4j.properties
@@ -1,13 +1,11 @@
 log4j.rootLogger=INFO, Console
 log4j.logger.azkaban=INFO, server
-
 log4j.appender.server=org.apache.log4j.RollingFileAppender
 log4j.appender.server.layout=org.apache.log4j.PatternLayout
 log4j.appender.server.File=azkaban-webserver.log
 log4j.appender.server.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] [Azkaban] %m%n
 log4j.appender.server.MaxFileSize=102400MB
 log4j.appender.server.MaxBackupIndex=2
-
 log4j.appender.Console=org.apache.log4j.ConsoleAppender
 log4j.appender.Console.layout=org.apache.log4j.PatternLayout
 log4j.appender.Console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] [Azkaban] %m%n
diff --git a/azkaban-solo-server/src/test/java/azkaban/soloserver/AzkabanSingleServerTest.java b/azkaban-solo-server/src/test/java/azkaban/soloserver/AzkabanSingleServerTest.java
index 2cd1035..495aad8 100644
--- a/azkaban-solo-server/src/test/java/azkaban/soloserver/AzkabanSingleServerTest.java
+++ b/azkaban-solo-server/src/test/java/azkaban/soloserver/AzkabanSingleServerTest.java
@@ -17,6 +17,12 @@
 
 package azkaban.soloserver;
 
+import static azkaban.ServiceProvider.*;
+import static azkaban.executor.ExecutorManager.*;
+import static java.util.Objects.*;
+import static org.apache.commons.io.FileUtils.*;
+import static org.junit.Assert.*;
+
 import azkaban.AzkabanCommonModule;
 import azkaban.database.AzkabanDatabaseSetup;
 import azkaban.database.AzkabanDatabaseUpdater;
@@ -35,17 +41,11 @@ import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
 
-import static azkaban.ServiceProvider.*;
-import static azkaban.executor.ExecutorManager.*;
-import static java.util.Objects.*;
-import static org.apache.commons.io.FileUtils.*;
-import static org.junit.Assert.*;
-
 
 public class AzkabanSingleServerTest {
-  private static final Logger log = Logger.getLogger(AzkabanSingleServerTest.class);
-  public static final String AZKABAN_DB_SQL_PATH = "azkaban-db/src/main/sql";
 
+  public static final String AZKABAN_DB_SQL_PATH = "azkaban-db/src/main/sql";
+  private static final Logger log = Logger.getLogger(AzkabanSingleServerTest.class);
   private static final Props props = new Props();
 
   private static String getConfPath() {
@@ -55,13 +55,22 @@ public class AzkabanSingleServerTest {
 
   private static String getSqlScriptsDir() throws IOException {
     // Dummy because any resource file works.
-    Path resources = Paths.get(getConfPath()).getParent();
-    Path azkabanRoot = resources.getParent().getParent().getParent().getParent();
+    final Path resources = Paths.get(getConfPath()).getParent();
+    final Path azkabanRoot = resources.getParent().getParent().getParent().getParent();
 
-    File sqlScriptDir = Paths.get(azkabanRoot.toString(), AZKABAN_DB_SQL_PATH).toFile();
+    final File sqlScriptDir = Paths.get(azkabanRoot.toString(), AZKABAN_DB_SQL_PATH).toFile();
     return sqlScriptDir.getCanonicalPath();
   }
 
+  @AfterClass
+  public static void tearDown() throws Exception {
+    deleteQuietly(new File("h2.mv.db"));
+    deleteQuietly(new File("h2.trace.db"));
+    deleteQuietly(new File("executor.port"));
+    deleteQuietly(new File("executions"));
+    deleteQuietly(new File("projects"));
+  }
+
   @Before
   public void setUp() throws Exception {
     tearDown();
@@ -79,21 +88,12 @@ public class AzkabanSingleServerTest {
     props.put("user.manager.xml.file", new File(confPath, "azkaban-users.xml").getPath());
     props.put("executor.port", "12321");
 
-    String sqlScriptsDir = getSqlScriptsDir();
+    final String sqlScriptsDir = getSqlScriptsDir();
     assertTrue(new File(sqlScriptsDir).isDirectory());
     props.put(AzkabanDatabaseSetup.DATABASE_SQL_SCRIPT_DIR, sqlScriptsDir);
     AzkabanDatabaseUpdater.runDatabaseUpdater(props, sqlScriptsDir, true);
   }
 
-  @AfterClass
-  public static void tearDown() throws Exception {
-    deleteQuietly(new File("h2.mv.db"));
-    deleteQuietly(new File("h2.trace.db"));
-    deleteQuietly(new File("executor.port"));
-    deleteQuietly(new File("executions"));
-    deleteQuietly(new File("projects"));
-  }
-
   @Test
   public void testInjection() throws Exception {
     SERVICE_PROVIDER.unsetInjector();
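
The test above reuses the static @AfterClass cleanup from @Before as well, so a previously aborted run cannot poison the working directory. A trimmed sketch of that pattern, using the same commons-io deleteQuietly call as the diff (only two of the cleaned-up paths are repeated here):

import static org.apache.commons.io.FileUtils.deleteQuietly;

import java.io.File;
import org.junit.AfterClass;
import org.junit.Before;

public class WorkingDirCleanupSketch {

  // Removes artifacts the solo server writes into the working directory.
  @AfterClass
  public static void tearDown() {
    deleteQuietly(new File("h2.mv.db"));
    deleteQuietly(new File("projects"));  // deleteQuietly handles directories too
  }

  // Running the same cleanup before each test guards against leftovers from a crashed run.
  @Before
  public void setUp() {
    tearDown();
  }
}
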
diff --git a/azkaban-solo-server/src/test/resources/conf/azkaban-users.xml b/azkaban-solo-server/src/test/resources/conf/azkaban-users.xml
index a13035d..0df8fd1 100644
--- a/azkaban-solo-server/src/test/resources/conf/azkaban-users.xml
+++ b/azkaban-solo-server/src/test/resources/conf/azkaban-users.xml
@@ -1,7 +1,7 @@
 <azkaban-users>
-	<user username="azkaban" password="azkaban" roles="admin" groups="azkaban" />
-	<user username="metrics" password="metrics" roles="metrics"/>
+  <user groups="azkaban" password="azkaban" roles="admin" username="azkaban"/>
+  <user password="metrics" roles="metrics" username="metrics"/>
 
-	<role name="admin" permissions="ADMIN" />
-	<role name="metrics" permissions="METRICS"/>
+  <role name="admin" permissions="ADMIN"/>
+  <role name="metrics" permissions="METRICS"/>
 </azkaban-users>
diff --git a/azkaban-solo-server/src/test/resources/log4j.properties b/azkaban-solo-server/src/test/resources/log4j.properties
index 705b927..2db62b2 100644
--- a/azkaban-solo-server/src/test/resources/log4j.properties
+++ b/azkaban-solo-server/src/test/resources/log4j.properties
@@ -1,13 +1,11 @@
 log4j.rootLogger=INFO, Console
 log4j.logger.azkaban=INFO, server
-
 log4j.appender.server=org.apache.log4j.RollingFileAppender
 log4j.appender.server.layout=org.apache.log4j.PatternLayout
 log4j.appender.server.File=azkaban-webserver.log
 log4j.appender.server.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] [Azkaban] %m%n
 log4j.appender.server.MaxFileSize=102400MB
 log4j.appender.server.MaxBackupIndex=2
-
 log4j.appender.Console=org.apache.log4j.ConsoleAppender
 log4j.appender.Console.layout=org.apache.log4j.PatternLayout
 log4j.appender.Console.layout.ConversionPattern=%d{yyyy/MM/dd HH:mm:ss.SSS Z} %p [%c{1}] [Azkaban] %m%n
diff --git a/azkaban-spi/build.gradle b/azkaban-spi/build.gradle
index 4087b7b..e991648 100644
--- a/azkaban-spi/build.gradle
+++ b/azkaban-spi/build.gradle
@@ -1,4 +1,3 @@
-
 /*
  * Copyright 2017 LinkedIn Corp.
  *
@@ -17,7 +16,7 @@
  */
 
 dependencies {
-  /**
-   * The dependency list of the spi package should be NONE or VERY MINIMAL!! See @README.md of this package.
-   **/
+    /**
+     * The dependency list of the spi package should be NONE or VERY MINIMAL!! See @README.md of this package.
+     **/
 }
diff --git a/azkaban-spi/src/main/java/azkaban/spi/AzkabanException.java b/azkaban-spi/src/main/java/azkaban/spi/AzkabanException.java
index e9d57e8..a4f451f 100644
--- a/azkaban-spi/src/main/java/azkaban/spi/AzkabanException.java
+++ b/azkaban-spi/src/main/java/azkaban/spi/AzkabanException.java
@@ -18,20 +18,21 @@
 package azkaban.spi;
 
 public class AzkabanException extends RuntimeException {
-  public AzkabanException(String message) {
+
+  public AzkabanException(final String message) {
     this(message, null);
   }
 
-  public AzkabanException(Throwable throwable) {
+  public AzkabanException(final Throwable throwable) {
     this(null, throwable);
   }
 
-  public AzkabanException(String message, Throwable cause) {
+  public AzkabanException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public AzkabanException(String message, Throwable cause,
-      boolean enableSuppression, boolean writableStackTrace) {
+  public AzkabanException(final String message, final Throwable cause,
+      final boolean enableSuppression, final boolean writableStackTrace) {
     super(message, cause, enableSuppression, writableStackTrace);
   }
 
diff --git a/azkaban-spi/src/main/java/azkaban/spi/Storage.java b/azkaban-spi/src/main/java/azkaban/spi/Storage.java
index 7c7ed9d..1b2ddb9 100644
--- a/azkaban-spi/src/main/java/azkaban/spi/Storage.java
+++ b/azkaban-spi/src/main/java/azkaban/spi/Storage.java
@@ -23,11 +23,12 @@ import java.io.InputStream;
 
 
 /**
- * The Azkaban Storage interface would facilitate getting and putting objects into a storage mechanism of choice.
- * By default, this is set to the MySQL database. However, users can have the ability to choose between multiple
- * storage types in future.
+ * The Azkaban Storage interface would facilitate getting and putting objects into a storage
+ * mechanism of choice. By default, this is set to the MySQL database. However, users can have the
+ * ability to choose between multiple storage types in future.
  *
- * This is different from storing Azkaban state in MySQL which would typically be maintained in a different database.
+ * This is different from storing Azkaban state in MySQL which would typically be maintained in a
+ * different database.
  *
  * Note: This is a synchronous interface.
  */
@@ -38,7 +39,6 @@ public interface Storage {
    *
    * @param key The key is a string pointing to the blob in Storage.
    * @return InputStream for fetching the blob. null if the key is not found.
-   *
    */
   InputStream get(String key) throws IOException;
 
@@ -47,7 +47,6 @@ public interface Storage {
    *
    * @param metadata Metadata related to the input stream
    * @param localFile Read data from a local file
-   *
    * @return Key associated with the current object on successful put
    */
   String put(StorageMetadata metadata, File localFile);
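
The Storage interface above is deliberately small: get(key) returns an InputStream (or null when the key is unknown) and put(metadata, localFile) returns the key under which the blob was stored. A toy in-memory sketch of that contract follows; it mirrors only the two methods visible in this hunk and does not claim to implement the full interface.

import azkaban.spi.StorageException;
import azkaban.spi.StorageMetadata;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class InMemoryStorageSketch {

  private final Map<String, byte[]> blobs = new ConcurrentHashMap<>();

  // Returns null when the key is not found, matching the documented get() behaviour.
  public InputStream get(final String key) throws IOException {
    final byte[] data = this.blobs.get(key);
    return data == null ? null : new ByteArrayInputStream(data);
  }

  // Derives a key from the metadata (the key scheme is illustrative) and stores the file contents.
  public String put(final StorageMetadata metadata, final File localFile) {
    final String key = metadata.getProjectId() + "/" + metadata.getVersion();
    try {
      this.blobs.put(key, Files.readAllBytes(localFile.toPath()));
    } catch (final IOException e) {
      throw new StorageException(e);
    }
    return key;
  }
}
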
diff --git a/azkaban-spi/src/main/java/azkaban/spi/StorageException.java b/azkaban-spi/src/main/java/azkaban/spi/StorageException.java
index 2006063..93830ac 100644
--- a/azkaban-spi/src/main/java/azkaban/spi/StorageException.java
+++ b/azkaban-spi/src/main/java/azkaban/spi/StorageException.java
@@ -21,20 +21,21 @@ package azkaban.spi;
  * Super class to capture any exceptions related to {@link Storage}
  */
 public class StorageException extends AzkabanException {
-  public StorageException(String message) {
+
+  public StorageException(final String message) {
     this(message, null);
   }
 
-  public StorageException(Throwable throwable) {
+  public StorageException(final Throwable throwable) {
     this(null, throwable);
   }
 
-  public StorageException(String message, Throwable cause) {
+  public StorageException(final String message, final Throwable cause) {
     super(message, cause);
   }
 
-  public StorageException(String message, Throwable cause,
-      boolean enableSuppression, boolean writableStackTrace) {
+  public StorageException(final String message, final Throwable cause,
+      final boolean enableSuppression, final boolean writableStackTrace) {
     super(message, cause, enableSuppression, writableStackTrace);
   }
 
diff --git a/azkaban-spi/src/main/java/azkaban/spi/StorageMetadata.java b/azkaban-spi/src/main/java/azkaban/spi/StorageMetadata.java
index 19f31a5..85dec6f 100644
--- a/azkaban-spi/src/main/java/azkaban/spi/StorageMetadata.java
+++ b/azkaban-spi/src/main/java/azkaban/spi/StorageMetadata.java
@@ -17,18 +17,20 @@
 
 package azkaban.spi;
 
-import java.util.Objects;
+import static java.util.Objects.requireNonNull;
 
-import static java.util.Objects.*;
+import java.util.Objects;
 
 
 public class StorageMetadata {
+
   private final int projectId;
   private final int version;
   private final String uploader;
-  private byte[] hash;
+  private final byte[] hash;
 
-  public StorageMetadata(int projectId, int version, String uploader, byte[] hash) {
+  public StorageMetadata(final int projectId, final int version, final String uploader,
+      final byte[] hash) {
     this.projectId = projectId;
     this.version = version;
     this.uploader = requireNonNull(uploader);
@@ -37,41 +39,43 @@ public class StorageMetadata {
 
   @Override
   public String toString() {
-    return "StorageMetadata{" + "projectId='" + projectId + '\'' + ", version='" + version + '\'' + '}';
+    return "StorageMetadata{" + "projectId='" + this.projectId + '\'' + ", version='" + this.version
+        + '\''
+        + '}';
   }
 
   public int getProjectId() {
-    return projectId;
+    return this.projectId;
   }
 
   public int getVersion() {
-    return version;
+    return this.version;
   }
 
   public String getUploader() {
-    return uploader;
+    return this.uploader;
   }
 
   public byte[] getHash() {
-    return hash;
+    return this.hash;
   }
 
   @Override
-  public boolean equals(Object o) {
+  public boolean equals(final Object o) {
     if (this == o) {
       return true;
     }
     if (o == null || getClass() != o.getClass()) {
       return false;
     }
-    StorageMetadata that = (StorageMetadata) o;
-    return Objects.equals(projectId, that.projectId) &&
-        Objects.equals(version, that.version) &&
-        Objects.equals(uploader, that.uploader);
+    final StorageMetadata that = (StorageMetadata) o;
+    return Objects.equals(this.projectId, that.projectId) &&
+        Objects.equals(this.version, that.version) &&
+        Objects.equals(this.uploader, that.uploader);
   }
 
   @Override
   public int hashCode() {
-    return Objects.hash(projectId, version, uploader);
+    return Objects.hash(this.projectId, this.version, this.uploader);
   }
 }
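
One consequence of the equals()/hashCode() pair above is that the hash bytes are not part of object identity: two StorageMetadata instances describing the same project, version and uploader compare equal even if their hashes differ. A tiny illustration (values are arbitrary):

import azkaban.spi.StorageMetadata;

public class StorageMetadataEqualitySketch {

  public static void main(final String[] args) {
    final StorageMetadata a = new StorageMetadata(1, 3, "azkaban", new byte[]{1});
    final StorageMetadata b = new StorageMetadata(1, 3, "azkaban", new byte[]{2});
    System.out.println(a.equals(b));  // prints true: the hash is ignored by equals()
  }
}
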
diff --git a/azkaban-test/src/test/java/azkaban/test/executions/TestExecutions.java b/azkaban-test/src/test/java/azkaban/test/executions/TestExecutions.java
index 56cf0c6..a3ebe75 100644
--- a/azkaban-test/src/test/java/azkaban/test/executions/TestExecutions.java
+++ b/azkaban-test/src/test/java/azkaban/test/executions/TestExecutions.java
@@ -17,14 +17,14 @@
 package azkaban.test.executions;
 
 import java.io.File;
-import java.net.URL;
 import java.net.URISyntaxException;
-
+import java.net.URL;
 import org.junit.Assert;
 
 public class TestExecutions {
+
   public static File getFlowDir(final String path) throws URISyntaxException {
-    URL url = TestExecutions.class.getResource(path);
+    final URL url = TestExecutions.class.getResource(path);
     Assert.assertNotNull(url);
     return new File(url.toURI());
   }
diff --git a/azkaban-test/src/test/resources/azkaban/test/azkaban-users.xml b/azkaban-test/src/test/resources/azkaban/test/azkaban-users.xml
index 55941a7..75f2d0f 100644
--- a/azkaban-test/src/test/resources/azkaban/test/azkaban-users.xml
+++ b/azkaban-test/src/test/resources/azkaban/test/azkaban-users.xml
@@ -1,5 +1,5 @@
 <azkaban-users>
-	<user username="testAdmin" password="testAdmin" roles="admin" groups="azkaban" />
-	<user username="testUser" password="testUser" />
-	<role name="admin" permissions="ADMIN" />
+  <user groups="azkaban" password="testAdmin" roles="admin" username="testAdmin"/>
+  <user password="testUser" username="testUser"/>
+  <role name="admin" permissions="ADMIN"/>
 </azkaban-users>
diff --git a/azkaban-web-server/src/test/java/azkaban/fixture/VelocityTemplateTestUtil.java b/azkaban-web-server/src/test/java/azkaban/fixture/VelocityTemplateTestUtil.java
index 8eb730f..3a95cf5 100644
--- a/azkaban-web-server/src/test/java/azkaban/fixture/VelocityTemplateTestUtil.java
+++ b/azkaban-web-server/src/test/java/azkaban/fixture/VelocityTemplateTestUtil.java
@@ -19,9 +19,9 @@ public class VelocityTemplateTestUtil {
    * @param context the context
    * @return string
    */
-  public static String renderTemplate(String templateName, VelocityContext context) {
-    StringWriter stringWriter = new StringWriter();
-    VelocityEngine engine = new VelocityEngine();
+  public static String renderTemplate(final String templateName, final VelocityContext context) {
+    final StringWriter stringWriter = new StringWriter();
+    final VelocityEngine engine = new VelocityEngine();
     engine.init("src/test/resources/velocity.properties");
 
     engine.mergeTemplate(TEMPLATE_BASE_DIR + templateName + ".vm", "UTF-8", context, stringWriter);
diff --git a/azkaban-web-server/src/test/java/azkaban/webapp/servlet/ProjectSideBarViewTest.java b/azkaban-web-server/src/test/java/azkaban/webapp/servlet/ProjectSideBarViewTest.java
index 134387c..ec2bc59 100644
--- a/azkaban-web-server/src/test/java/azkaban/webapp/servlet/ProjectSideBarViewTest.java
+++ b/azkaban-web-server/src/test/java/azkaban/webapp/servlet/ProjectSideBarViewTest.java
@@ -25,9 +25,9 @@ public class ProjectSideBarViewTest {
   @Test
   public void testProjectSideBarView()
       throws Exception {
-    VelocityContext context = VelocityContextTestUtil.getInstance();
+    final VelocityContext context = VelocityContextTestUtil.getInstance();
 
-    Project project = MockProject.getMockProject();
+    final Project project = MockProject.getMockProject();
 
     // Intentionally tries to inject a Javascript.
     project.setDescription("<script>window.alert(\"hacked\")</script>");
@@ -36,8 +36,8 @@ public class ProjectSideBarViewTest {
     context.put("admins", "admin_name");
     context.put("userpermission", "admin_permission");
 
-    String result = VelocityTemplateTestUtil.renderTemplate("projectsidebar", context);
-    String actual = FileAssertion.surroundWithHtmlTag(result);
+    final String result = VelocityTemplateTestUtil.renderTemplate("projectsidebar", context);
+    final String actual = FileAssertion.surroundWithHtmlTag(result);
     WebFileAssertion.assertStringEqualFileContent("project-side-bar.html", actual);
   }
 }
\ No newline at end of file
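
The velocity test utility changed above boils down to initializing a VelocityEngine from a properties file and merging a .vm template into a StringWriter; ProjectSideBarViewTest then checks the rendered HTML (including the intentionally injected <script> description) against known-good output. A condensed sketch of the rendering step; the template path is passed in directly because the TEMPLATE_BASE_DIR constant is not visible in this hunk.

import java.io.StringWriter;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;

public class VelocityRenderSketch {

  // templatePath is the full path of the .vm file as seen by the velocity resource loader.
  static String render(final String templatePath, final VelocityContext context) {
    final StringWriter writer = new StringWriter();
    final VelocityEngine engine = new VelocityEngine();
    engine.init("src/test/resources/velocity.properties");  // path taken from the test above
    engine.mergeTemplate(templatePath, "UTF-8", context, writer);
    return writer.toString();
  }
}
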
diff --git a/azkaban-web-server/src/web/fonts/glyphicons-halflings-regular.svg b/azkaban-web-server/src/web/fonts/glyphicons-halflings-regular.svg
index 4469488..41de871 100644
--- a/azkaban-web-server/src/web/fonts/glyphicons-halflings-regular.svg
+++ b/azkaban-web-server/src/web/fonts/glyphicons-halflings-regular.svg
@@ -1,229 +1,595 @@
 <?xml version="1.0" standalone="no"?>
 <!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" >
 <svg xmlns="http://www.w3.org/2000/svg">
-<metadata></metadata>
-<defs>
-<font id="glyphicons_halflingsregular" horiz-adv-x="1200" >
-<font-face units-per-em="1200" ascent="960" descent="-240" />
-<missing-glyph horiz-adv-x="500" />
-<glyph />
-<glyph />
-<glyph unicode="&#xd;" />
-<glyph unicode=" " />
-<glyph unicode="*" d="M100 500v200h259l-183 183l141 141l183 -183v259h200v-259l183 183l141 -141l-183 -183h259v-200h-259l183 -183l-141 -141l-183 183v-259h-200v259l-183 -183l-141 141l183 183h-259z" />
-<glyph unicode="+" d="M0 400v300h400v400h300v-400h400v-300h-400v-400h-300v400h-400z" />
-<glyph unicode="&#xa0;" />
-<glyph unicode="&#x2000;" horiz-adv-x="652" />
-<glyph unicode="&#x2001;" horiz-adv-x="1304" />
-<glyph unicode="&#x2002;" horiz-adv-x="652" />
-<glyph unicode="&#x2003;" horiz-adv-x="1304" />
-<glyph unicode="&#x2004;" horiz-adv-x="434" />
-<glyph unicode="&#x2005;" horiz-adv-x="326" />
-<glyph unicode="&#x2006;" horiz-adv-x="217" />
-<glyph unicode="&#x2007;" horiz-adv-x="217" />
-<glyph unicode="&#x2008;" horiz-adv-x="163" />
-<glyph unicode="&#x2009;" horiz-adv-x="260" />
-<glyph unicode="&#x200a;" horiz-adv-x="72" />
-<glyph unicode="&#x202f;" horiz-adv-x="260" />
-<glyph unicode="&#x205f;" horiz-adv-x="326" />
-<glyph unicode="&#x20ac;" d="M100 500l100 100h113q0 47 5 100h-218l100 100h135q37 167 112 257q117 141 297 141q242 0 354 -189q60 -103 66 -209h-181q0 55 -25.5 99t-63.5 68t-75 36.5t-67 12.5q-24 0 -52.5 -10t-62.5 -32t-65.5 -67t-50.5 -107h379l-100 -100h-300q-6 -46 -6 -100h406l-100 -100 h-300q9 -74 33 -132t52.5 -91t62 -54.5t59 -29t46.5 -7.5q29 0 66 13t75 37t63.5 67.5t25.5 96.5h174q-31 -172 -128 -278q-107 -117 -274 -117q-205 0 -324 158q-36 46 -69 131.5t-45 205.5h-217z" />
-<glyph unicode="&#x2212;" d="M200 400h900v300h-900v-300z" />
-<glyph unicode="&#x2601;" d="M-14 494q0 -80 56.5 -137t135.5 -57h750q120 0 205 86t85 208q0 120 -85 206.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5z" />
-<glyph unicode="&#x2709;" d="M0 100l400 400l200 -200l200 200l400 -400h-1200zM0 300v600l300 -300zM0 1100l600 -603l600 603h-1200zM900 600l300 300v-600z" />
-<glyph unicode="&#x270f;" d="M-13 -13l333 112l-223 223zM187 403l214 -214l614 614l-214 214zM887 1103l214 -214l99 92q13 13 13 32.5t-13 33.5l-153 153q-15 13 -33 13t-33 -13z" />
-<glyph unicode="&#xe000;" horiz-adv-x="500" d="M0 0z" />
-<glyph unicode="&#xe001;" d="M0 1200h1200l-500 -550v-550h300v-100h-800v100h300v550z" />
-<glyph unicode="&#xe002;" d="M14 84q18 -55 86 -75.5t147 5.5q65 21 109 69t44 90v606l600 155v-521q-64 16 -138 -7q-79 -26 -122.5 -83t-25.5 -111q17 -55 85.5 -75.5t147.5 4.5q70 23 111.5 63.5t41.5 95.5v881q0 10 -7 15.5t-17 2.5l-752 -193q-10 -3 -17 -12.5t-7 -19.5v-689q-64 17 -138 -7 q-79 -25 -122.5 -82t-25.5 -112z" />
-<glyph unicode="&#xe003;" d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233z" />
-<glyph unicode="&#xe005;" d="M100 784q0 64 28 123t73 100.5t104.5 64t119 20.5t120 -38.5t104.5 -104.5q48 69 109.5 105t121.5 38t118.5 -20.5t102.5 -64t71 -100.5t27 -123q0 -57 -33.5 -117.5t-94 -124.5t-126.5 -127.5t-150 -152.5t-146 -174q-62 85 -145.5 174t-149.5 152.5t-126.5 127.5 t-94 124.5t-33.5 117.5z" />
-<glyph unicode="&#xe006;" d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1z" />
-<glyph unicode="&#xe007;" d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1zM237 700l196 -142l-73 -226l192 140l195 -141l-74 229l193 140h-235l-77 211l-78 -211h-239z" />
-<glyph unicode="&#xe008;" d="M0 0v143l400 257v100q-37 0 -68.5 74.5t-31.5 125.5v200q0 124 88 212t212 88t212 -88t88 -212v-200q0 -51 -31.5 -125.5t-68.5 -74.5v-100l400 -257v-143h-1200z" />
-<glyph unicode="&#xe009;" d="M0 0v1100h1200v-1100h-1200zM100 100h100v100h-100v-100zM100 300h100v100h-100v-100zM100 500h100v100h-100v-100zM100 700h100v100h-100v-100zM100 900h100v100h-100v-100zM300 100h600v400h-600v-400zM300 600h600v400h-600v-400zM1000 100h100v100h-100v-100z M1000 300h100v100h-100v-100zM1000 500h100v100h-100v-100zM1000 700h100v100h-100v-100zM1000 900h100v100h-100v-100z" />
-<glyph unicode="&#xe010;" d="M0 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM0 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400 q-21 0 -35.5 14.5t-14.5 35.5zM600 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM600 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400 q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5z" />
-<glyph unicode="&#xe011;" d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200 q-21 0 -35.5 14.5t-14.5 35.5zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 450v200q0 21 14.5 35.5t35.5 14.5h200 q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5z" />
-<glyph unicode="&#xe012;" d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v200q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5 t-14.5 -35.5v-200zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5z" />
-<glyph unicode="&#xe013;" d="M29 454l419 -420l818 820l-212 212l-607 -607l-206 207z" />
-<glyph unicode="&#xe014;" d="M106 318l282 282l-282 282l212 212l282 -282l282 282l212 -212l-282 -282l282 -282l-212 -212l-282 282l-282 -282z" />
-<glyph unicode="&#xe015;" d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233zM300 600v200h100v100h200v-100h100v-200h-100v-100h-200v100h-100z" />
-<glyph unicode="&#xe016;" d="M23 694q0 200 142 342t342 142t342 -142t142 -342q0 -141 -78 -262l300 -299q7 -7 7 -18t-7 -18l-109 -109q-8 -8 -18 -8t-18 8l-300 299q-120 -77 -261 -77q-200 0 -342 142t-142 342zM176 694q0 -136 97 -233t234 -97t233.5 97t96.5 233t-96.5 233t-233.5 97t-234 -97 t-97 -233zM300 601h400v200h-400v-200z" />
-<glyph unicode="&#xe017;" d="M23 600q0 183 105 331t272 210v-166q-103 -55 -165 -155t-62 -220q0 -177 125 -302t302 -125t302 125t125 302q0 120 -62 220t-165 155v166q167 -62 272 -210t105 -331q0 -118 -45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5 zM500 750q0 -21 14.5 -35.5t35.5 -14.5h100q21 0 35.5 14.5t14.5 35.5v400q0 21 -14.5 35.5t-35.5 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-400z" />
-<glyph unicode="&#xe018;" d="M100 1h200v300h-200v-300zM400 1v500h200v-500h-200zM700 1v800h200v-800h-200zM1000 1v1200h200v-1200h-200z" />
-<glyph unicode="&#xe019;" d="M26 601q0 -33 6 -74l151 -38l2 -6q14 -49 38 -93l3 -5l-80 -134q45 -59 105 -105l133 81l5 -3q45 -26 94 -39l5 -2l38 -151q40 -5 74 -5q27 0 74 5l38 151l6 2q46 13 93 39l5 3l134 -81q56 44 104 105l-80 134l3 5q24 44 39 93l1 6l152 38q5 40 5 74q0 28 -5 73l-152 38 l-1 6q-16 51 -39 93l-3 5l80 134q-44 58 -104 105l-134 -81l-5 3q-45 25 -93 39l-6 1l-38 152q-40 5 -74 5q-27 0 -74 -5l-38 -152l-5 -1q-50 -14 -94 -39l-5 -3l-133 81q-59 -47 -105 -105l80 -134l-3 -5q-25 -47 -38 -93l-2 -6l-151 -38q-6 -48 -6 -73zM385 601 q0 88 63 151t152 63t152 -63t63 -151q0 -89 -63 -152t-152 -63t-152 63t-63 152z" />
-<glyph unicode="&#xe020;" d="M100 1025v50q0 10 7.5 17.5t17.5 7.5h275v100q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5v-100h275q10 0 17.5 -7.5t7.5 -17.5v-50q0 -11 -7 -18t-18 -7h-1050q-11 0 -18 7t-7 18zM200 100v800h900v-800q0 -41 -29.5 -71t-70.5 -30h-700q-41 0 -70.5 30 t-29.5 71zM300 100h100v700h-100v-700zM500 100h100v700h-100v-700zM500 1100h300v100h-300v-100zM700 100h100v700h-100v-700zM900 100h100v700h-100v-700z" />
-<glyph unicode="&#xe021;" d="M1 601l656 644l644 -644h-200v-600h-300v400h-300v-400h-300v600h-200z" />
-<glyph unicode="&#xe022;" d="M100 25v1150q0 11 7 18t18 7h475v-500h400v-675q0 -11 -7 -18t-18 -7h-850q-11 0 -18 7t-7 18zM700 800v300l300 -300h-300z" />
-<glyph unicode="&#xe023;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 500v400h100 v-300h200v-100h-300z" />
-<glyph unicode="&#xe024;" d="M-100 0l431 1200h209l-21 -300h162l-20 300h208l431 -1200h-538l-41 400h-242l-40 -400h-539zM488 500h224l-27 300h-170z" />
-<glyph unicode="&#xe025;" d="M0 0v400h490l-290 300h200v500h300v-500h200l-290 -300h490v-400h-1100zM813 200h175v100h-175v-100z" />
-<glyph unicode="&#xe026;" d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM188 600q0 -170 121 -291t291 -121t291 121t121 291t-121 291t-291 121 t-291 -121t-121 -291zM350 600h150v300h200v-300h150l-250 -300z" />
-<glyph unicode="&#xe027;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM350 600l250 300 l250 -300h-150v-300h-200v300h-150z" />
-<glyph unicode="&#xe028;" d="M0 25v475l200 700h800q199 -700 200 -700v-475q0 -11 -7 -18t-18 -7h-1150q-11 0 -18 7t-7 18zM200 500h200l50 -200h300l50 200h200l-97 500h-606z" />
-<glyph unicode="&#xe029;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 397v401 l297 -200z" />
-<glyph unicode="&#xe030;" d="M23 600q0 -118 45.5 -224.5t123 -184t184 -123t224.5 -45.5t224.5 45.5t184 123t123 184t45.5 224.5h-150q0 -177 -125 -302t-302 -125t-302 125t-125 302t125 302t302 125q136 0 246 -81l-146 -146h400v400l-145 -145q-157 122 -355 122q-118 0 -224.5 -45.5t-184 -123 t-123 -184t-45.5 -224.5z" />
-<glyph unicode="&#xe031;" d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5q198 0 355 -122l145 145v-400h-400l147 147q-112 80 -247 80q-177 0 -302 -125t-125 -302h-150zM100 0v400h400l-147 -147q112 -80 247 -80q177 0 302 125t125 302h150q0 -118 -45.5 -224.5t-123 -184t-184 -123 t-224.5 -45.5q-198 0 -355 122z" />
-<glyph unicode="&#xe032;" d="M100 0h1100v1200h-1100v-1200zM200 100v900h900v-900h-900zM300 200v100h100v-100h-100zM300 400v100h100v-100h-100zM300 600v100h100v-100h-100zM300 800v100h100v-100h-100zM500 200h500v100h-500v-100zM500 400v100h500v-100h-500zM500 600v100h500v-100h-500z M500 800v100h500v-100h-500z" />
-<glyph unicode="&#xe033;" d="M0 100v600q0 41 29.5 70.5t70.5 29.5h100v200q0 82 59 141t141 59h300q82 0 141 -59t59 -141v-200h100q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-900q-41 0 -70.5 29.5t-29.5 70.5zM400 800h300v150q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-150z" />
-<glyph unicode="&#xe034;" d="M100 0v1100h100v-1100h-100zM300 400q60 60 127.5 84t127.5 17.5t122 -23t119 -30t110 -11t103 42t91 120.5v500q-40 -81 -101.5 -115.5t-127.5 -29.5t-138 25t-139.5 40t-125.5 25t-103 -29.5t-65 -115.5v-500z" />
-<glyph unicode="&#xe035;" d="M0 275q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 127 70.5 231.5t184.5 161.5t245 57t245 -57t184.5 -161.5t70.5 -231.5v-300q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 116 -49.5 227t-131 192.5t-192.5 131t-227 49.5t-227 -49.5t-192.5 -131t-131 -192.5 t-49.5 -227v-300zM200 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14zM800 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14z" />
-<glyph unicode="&#xe036;" d="M0 400h300l300 -200v800l-300 -200h-300v-400zM688 459l141 141l-141 141l71 71l141 -141l141 141l71 -71l-141 -141l141 -141l-71 -71l-141 141l-141 -141z" />
-<glyph unicode="&#xe037;" d="M0 400h300l300 -200v800l-300 -200h-300v-400zM700 857l69 53q111 -135 111 -310q0 -169 -106 -302l-67 54q86 110 86 248q0 146 -93 257z" />
-<glyph unicode="&#xe038;" d="M0 401v400h300l300 200v-800l-300 200h-300zM702 858l69 53q111 -135 111 -310q0 -170 -106 -303l-67 55q86 110 86 248q0 145 -93 257zM889 951l7 -8q123 -151 123 -344q0 -189 -119 -339l-7 -8l81 -66l6 8q142 178 142 405q0 230 -144 408l-6 8z" />
-<glyph unicode="&#xe039;" d="M0 0h500v500h-200v100h-100v-100h-200v-500zM0 600h100v100h400v100h100v100h-100v300h-500v-600zM100 100v300h300v-300h-300zM100 800v300h300v-300h-300zM200 200v100h100v-100h-100zM200 900h100v100h-100v-100zM500 500v100h300v-300h200v-100h-100v-100h-200v100 h-100v100h100v200h-200zM600 0v100h100v-100h-100zM600 1000h100v-300h200v-300h300v200h-200v100h200v500h-600v-200zM800 800v300h300v-300h-300zM900 0v100h300v-100h-300zM900 900v100h100v-100h-100zM1100 200v100h100v-100h-100z" />
-<glyph unicode="&#xe040;" d="M0 200h100v1000h-100v-1000zM100 0v100h300v-100h-300zM200 200v1000h100v-1000h-100zM500 0v91h100v-91h-100zM500 200v1000h200v-1000h-200zM700 0v91h100v-91h-100zM800 200v1000h100v-1000h-100zM900 0v91h200v-91h-200zM1000 200v1000h200v-1000h-200z" />
-<glyph unicode="&#xe041;" d="M1 700v475q0 10 7.5 17.5t17.5 7.5h474l700 -700l-500 -500zM148 953q0 -42 29 -71q30 -30 71.5 -30t71.5 30q29 29 29 71t-29 71q-30 30 -71.5 30t-71.5 -30q-29 -29 -29 -71z" />
-<glyph unicode="&#xe042;" d="M2 700v475q0 11 7 18t18 7h474l700 -700l-500 -500zM148 953q0 -42 30 -71q29 -30 71 -30t71 30q30 29 30 71t-30 71q-29 30 -71 30t-71 -30q-30 -29 -30 -71zM701 1200h100l700 -700l-500 -500l-50 50l450 450z" />
-<glyph unicode="&#xe043;" d="M100 0v1025l175 175h925v-1000l-100 -100v1000h-750l-100 -100h750v-1000h-900z" />
-<glyph unicode="&#xe044;" d="M200 0l450 444l450 -443v1150q0 20 -14.5 35t-35.5 15h-800q-21 0 -35.5 -15t-14.5 -35v-1151z" />
-<glyph unicode="&#xe045;" d="M0 100v700h200l100 -200h600l100 200h200v-700h-200v200h-800v-200h-200zM253 829l40 -124h592l62 124l-94 346q-2 11 -10 18t-18 7h-450q-10 0 -18 -7t-10 -18zM281 24l38 152q2 10 11.5 17t19.5 7h500q10 0 19.5 -7t11.5 -17l38 -152q2 -10 -3.5 -17t-15.5 -7h-600 q-10 0 -15.5 7t-3.5 17z" />
-<glyph unicode="&#xe046;" d="M0 200q0 -41 29.5 -70.5t70.5 -29.5h1000q41 0 70.5 29.5t29.5 70.5v600q0 41 -29.5 70.5t-70.5 29.5h-150q-4 8 -11.5 21.5t-33 48t-53 61t-69 48t-83.5 21.5h-200q-41 0 -82 -20.5t-70 -50t-52 -59t-34 -50.5l-12 -20h-150q-41 0 -70.5 -29.5t-29.5 -70.5v-600z M356 500q0 100 72 172t172 72t172 -72t72 -172t-72 -172t-172 -72t-172 72t-72 172zM494 500q0 -44 31 -75t75 -31t75 31t31 75t-31 75t-75 31t-75 -31t-31 -75zM900 700v100h100v-100h-100z" />
-<glyph unicode="&#xe047;" d="M53 0h365v66q-41 0 -72 11t-49 38t1 71l92 234h391l82 -222q16 -45 -5.5 -88.5t-74.5 -43.5v-66h417v66q-34 1 -74 43q-18 19 -33 42t-21 37l-6 13l-385 998h-93l-399 -1006q-24 -48 -52 -75q-12 -12 -33 -25t-36 -20l-15 -7v-66zM416 521l178 457l46 -140l116 -317h-340 z" />
-<glyph unicode="&#xe048;" d="M100 0v89q41 7 70.5 32.5t29.5 65.5v827q0 28 -1 39.5t-5.5 26t-15.5 21t-29 14t-49 14.5v70h471q120 0 213 -88t93 -228q0 -55 -11.5 -101.5t-28 -74t-33.5 -47.5t-28 -28l-12 -7q8 -3 21.5 -9t48 -31.5t60.5 -58t47.5 -91.5t21.5 -129q0 -84 -59 -156.5t-142 -111 t-162 -38.5h-500zM400 200h161q89 0 153 48.5t64 132.5q0 90 -62.5 154.5t-156.5 64.5h-159v-400zM400 700h139q76 0 130 61.5t54 138.5q0 82 -84 130.5t-239 48.5v-379z" />
-<glyph unicode="&#xe049;" d="M200 0v57q77 7 134.5 40.5t65.5 80.5l173 849q10 56 -10 74t-91 37q-6 1 -10.5 2.5t-9.5 2.5v57h425l2 -57q-33 -8 -62 -25.5t-46 -37t-29.5 -38t-17.5 -30.5l-5 -12l-128 -825q-10 -52 14 -82t95 -36v-57h-500z" />
-<glyph unicode="&#xe050;" d="M-75 200h75v800h-75l125 167l125 -167h-75v-800h75l-125 -167zM300 900v300h150h700h150v-300h-50q0 29 -8 48.5t-18.5 30t-33.5 15t-39.5 5.5t-50.5 1h-200v-850l100 -50v-100h-400v100l100 50v850h-200q-34 0 -50.5 -1t-40 -5.5t-33.5 -15t-18.5 -30t-8.5 -48.5h-49z " />
-<glyph unicode="&#xe051;" d="M33 51l167 125v-75h800v75l167 -125l-167 -125v75h-800v-75zM100 901v300h150h700h150v-300h-50q0 29 -8 48.5t-18 30t-33.5 15t-40 5.5t-50.5 1h-200v-650l100 -50v-100h-400v100l100 50v650h-200q-34 0 -50.5 -1t-39.5 -5.5t-33.5 -15t-18.5 -30t-8 -48.5h-50z" />
-<glyph unicode="&#xe052;" d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 350q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM0 650q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1000q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 950q0 -20 14.5 -35t35.5 -15h600q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-600q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z" />
-<glyph unicode="&#xe053;" d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 650q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM200 350q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM200 950q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z" />
-<glyph unicode="&#xe054;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM100 650v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1000q-21 0 -35.5 15 t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM500 950v100q0 21 14.5 35.5t35.5 14.5h600q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-600 q-21 0 -35.5 15t-14.5 35z" />
-<glyph unicode="&#xe055;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100 q-21 0 -35.5 15t-14.5 35z" />
-<glyph unicode="&#xe056;" d="M0 50v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM300 50v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800 q-21 0 -35.5 15t-14.5 35zM300 650v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 950v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15 h-800q-21 0 -35.5 15t-14.5 35z" />
-<glyph unicode="&#xe057;" d="M-101 500v100h201v75l166 -125l-166 -125v75h-201zM300 0h100v1100h-100v-1100zM500 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35 v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 650q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100 q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100z" />
-<glyph unicode="&#xe058;" d="M1 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 650 q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM801 0v1100h100v-1100 h-100zM934 550l167 -125v75h200v100h-200v75z" />
-<glyph unicode="&#xe059;" d="M0 275v650q0 31 22 53t53 22h750q31 0 53 -22t22 -53v-650q0 -31 -22 -53t-53 -22h-750q-31 0 -53 22t-22 53zM900 600l300 300v-600z" />
-<glyph unicode="&#xe060;" d="M0 44v1012q0 18 13 31t31 13h1112q19 0 31.5 -13t12.5 -31v-1012q0 -18 -12.5 -31t-31.5 -13h-1112q-18 0 -31 13t-13 31zM100 263l247 182l298 -131l-74 156l293 318l236 -288v500h-1000v-737zM208 750q0 56 39 95t95 39t95 -39t39 -95t-39 -95t-95 -39t-95 39t-39 95z " />
-<glyph unicode="&#xe062;" d="M148 745q0 124 60.5 231.5t165 172t226.5 64.5q123 0 227 -63t164.5 -169.5t60.5 -229.5t-73 -272q-73 -114 -166.5 -237t-150.5 -189l-57 -66q-10 9 -27 26t-66.5 70.5t-96 109t-104 135.5t-100.5 155q-63 139 -63 262zM342 772q0 -107 75.5 -182.5t181.5 -75.5 q107 0 182.5 75.5t75.5 182.5t-75.5 182t-182.5 75t-182 -75.5t-75 -181.5z" />
-<glyph unicode="&#xe063;" d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM173 600q0 -177 125.5 -302t301.5 -125v854q-176 0 -301.5 -125 t-125.5 -302z" />
-<glyph unicode="&#xe064;" d="M117 406q0 94 34 186t88.5 172.5t112 159t115 177t87.5 194.5q21 -71 57.5 -142.5t76 -130.5t83 -118.5t82 -117t70 -116t50 -125.5t18.5 -136q0 -89 -39 -165.5t-102 -126.5t-140 -79.5t-156 -33.5q-114 6 -211.5 53t-161.5 138.5t-64 210.5zM243 414q14 -82 59.5 -136 t136.5 -80l16 98q-7 6 -18 17t-34 48t-33 77q-15 73 -14 143.5t10 122.5l9 51q-92 -110 -119.5 -185t-12.5 -156z" />
-<glyph unicode="&#xe065;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5q366 -6 397 -14l-186 -186h-311q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v125l200 200v-225q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM436 341l161 50l412 412l-114 113l-405 -405zM995 1015l113 -113l113 113l-21 85l-92 28z" />
-<glyph unicode="&#xe066;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h261l2 -80q-133 -32 -218 -120h-145q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5l200 153v-53q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5 zM423 524q30 38 81.5 64t103 35.5t99 14t77.5 3.5l29 -1v-209l360 324l-359 318v-216q-7 0 -19 -1t-48 -8t-69.5 -18.5t-76.5 -37t-76.5 -59t-62 -88t-39.5 -121.5z" />
-<glyph unicode="&#xe067;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q60 0 127 -23l-178 -177h-349q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v69l200 200v-169q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM342 632l283 -284l566 567l-136 137l-430 -431l-147 147z" />
-<glyph unicode="&#xe068;" d="M0 603l300 296v-198h200v200h-200l300 300l295 -300h-195v-200h200v198l300 -296l-300 -300v198h-200v-200h195l-295 -300l-300 300h200v200h-200v-198z" />
-<glyph unicode="&#xe069;" d="M200 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-1100l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z" />
-<glyph unicode="&#xe070;" d="M0 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-487l500 487v-1100l-500 488v-488l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z" />
-<glyph unicode="&#xe071;" d="M136 550l564 550v-487l500 487v-1100l-500 488v-488z" />
-<glyph unicode="&#xe072;" d="M200 0l900 550l-900 550v-1100z" />
-<glyph unicode="&#xe073;" d="M200 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5t-14.5 -35.5v-800zM600 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-800z" />
-<glyph unicode="&#xe074;" d="M200 150q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v800q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5v-800z" />
-<glyph unicode="&#xe075;" d="M0 0v1100l500 -487v487l564 -550l-564 -550v488z" />
-<glyph unicode="&#xe076;" d="M0 0v1100l500 -487v487l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438l-500 -488v488z" />
-<glyph unicode="&#xe077;" d="M300 0v1100l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438z" />
-<glyph unicode="&#xe078;" d="M100 250v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -21 -14.5 -35.5t-35.5 -14.5h-1000q-21 0 -35.5 14.5t-14.5 35.5zM100 500h1100l-550 564z" />
-<glyph unicode="&#xe079;" d="M185 599l592 -592l240 240l-353 353l353 353l-240 240z" />
-<glyph unicode="&#xe080;" d="M272 194l353 353l-353 353l241 240l572 -571l21 -22l-1 -1v-1l-592 -591z" />
-<glyph unicode="&#xe081;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM300 500h200v-200h200v200h200v200h-200v200h-200v-200h-200v-200z" />
-<glyph unicode="&#xe082;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM300 500h600v200h-600v-200z" />
-<glyph unicode="&#xe083;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM246 459l213 -213l141 142l141 -142l213 213l-142 141l142 141l-213 212l-141 -141l-141 142l-212 -213l141 -141z" />
-<glyph unicode="&#xe084;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM270 551l276 -277l411 411l-175 174l-236 -236l-102 102z" />
-<glyph unicode="&#xe085;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM363 700h144q4 0 11.5 -1t11 -1t6.5 3t3 9t1 11t3.5 8.5t3.5 6t5.5 4t6.5 2.5t9 1.5t9 0.5h11.5h12.5q19 0 30 -10t11 -26 q0 -22 -4 -28t-27 -22q-5 -1 -12.5 -3t-27 -13.5t-34 -27t-26.5 -46t-11 -68.5h200q5 3 14 8t31.5 25.5t39.5 45.5t31 69t14 94q0 51 -17.5 89t-42 58t-58.5 32t-58.5 15t-51.5 3q-105 0 -172 -56t-67 -183zM500 300h200v100h-200v-100z" />
-<glyph unicode="&#xe086;" d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM400 300h400v100h-100v300h-300v-100h100v-200h-100v-100zM500 800h200v100h-200v-100z" />
-<glyph unicode="&#xe087;" d="M0 500v200h194q15 60 36 104.5t55.5 86t88 69t126.5 40.5v200h200v-200q54 -20 113 -60t112.5 -105.5t71.5 -134.5h203v-200h-203q-25 -102 -116.5 -186t-180.5 -117v-197h-200v197q-140 27 -208 102.5t-98 200.5h-194zM290 500q24 -73 79.5 -127.5t130.5 -78.5v206h200 v-206q149 48 201 206h-201v200h200q-25 74 -76 127.5t-124 76.5v-204h-200v203q-75 -24 -130 -77.5t-79 -125.5h209v-200h-210z" />
-<glyph unicode="&#xe088;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM356 465l135 135 l-135 135l109 109l135 -135l135 135l109 -109l-135 -135l135 -135l-109 -109l-135 135l-135 -135z" />
-<glyph unicode="&#xe089;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM322 537l141 141 l87 -87l204 205l142 -142l-346 -345z" />
-<glyph unicode="&#xe090;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -115 62 -215l568 567q-100 62 -216 62q-171 0 -292.5 -121.5t-121.5 -292.5zM391 245q97 -59 209 -59q171 0 292.5 121.5t121.5 292.5 q0 112 -59 209z" />
-<glyph unicode="&#xe091;" d="M0 547l600 453v-300h600v-300h-600v-301z" />
-<glyph unicode="&#xe092;" d="M0 400v300h600v300l600 -453l-600 -448v301h-600z" />
-<glyph unicode="&#xe093;" d="M204 600l450 600l444 -600h-298v-600h-300v600h-296z" />
-<glyph unicode="&#xe094;" d="M104 600h296v600h300v-600h298l-449 -600z" />
-<glyph unicode="&#xe095;" d="M0 200q6 132 41 238.5t103.5 193t184 138t271.5 59.5v271l600 -453l-600 -448v301q-95 -2 -183 -20t-170 -52t-147 -92.5t-100 -135.5z" />
-<glyph unicode="&#xe096;" d="M0 0v400l129 -129l294 294l142 -142l-294 -294l129 -129h-400zM635 777l142 -142l294 294l129 -129v400h-400l129 -129z" />
-<glyph unicode="&#xe097;" d="M34 176l295 295l-129 129h400v-400l-129 130l-295 -295zM600 600v400l129 -129l295 295l142 -141l-295 -295l129 -130h-400z" />
-<glyph unicode="&#xe101;" d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5t224.5 -45.5t184 -123t123 -184t45.5 -224.5t-45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5zM456 851l58 -302q4 -20 21.5 -34.5t37.5 -14.5h54q20 0 37.5 14.5 t21.5 34.5l58 302q4 20 -8 34.5t-33 14.5h-207q-20 0 -32 -14.5t-8 -34.5zM500 300h200v100h-200v-100z" />
-<glyph unicode="&#xe102;" d="M0 800h100v-200h400v300h200v-300h400v200h100v100h-111v6t-1 15t-3 18l-34 172q-11 39 -41.5 63t-69.5 24q-32 0 -61 -17l-239 -144q-22 -13 -40 -35q-19 24 -40 36l-238 144q-33 18 -62 18q-39 0 -69.5 -23t-40.5 -61l-35 -177q-2 -8 -3 -18t-1 -15v-6h-111v-100z M100 0h400v400h-400v-400zM200 900q-3 0 14 48t35 96l18 47l214 -191h-281zM700 0v400h400v-400h-400zM731 900l202 197q5 -12 12 -32.5t23 -64t25 -72t7 -28.5h-269z" />
-<glyph unicode="&#xe103;" d="M0 -22v143l216 193q-9 53 -13 83t-5.5 94t9 113t38.5 114t74 124q47 60 99.5 102.5t103 68t127.5 48t145.5 37.5t184.5 43.5t220 58.5q0 -189 -22 -343t-59 -258t-89 -181.5t-108.5 -120t-122 -68t-125.5 -30t-121.5 -1.5t-107.5 12.5t-87.5 17t-56.5 7.5l-99 -55z M238.5 300.5q19.5 -6.5 86.5 76.5q55 66 367 234q70 38 118.5 69.5t102 79t99 111.5t86.5 148q22 50 24 60t-6 19q-7 5 -17 5t-26.5 -14.5t-33.5 -39.5q-35 -51 -113.5 -108.5t-139.5 -89.5l-61 -32q-369 -197 -458 -401q-48 -111 -28.5 -117.5z" />
-<glyph unicode="&#xe104;" d="M111 408q0 -33 5 -63q9 -56 44 -119.5t105 -108.5q31 -21 64 -16t62 23.5t57 49.5t48 61.5t35 60.5q32 66 39 184.5t-13 157.5q79 -80 122 -164t26 -184q-5 -33 -20.5 -69.5t-37.5 -80.5q-10 -19 -14.5 -29t-12 -26t-9 -23.5t-3 -19t2.5 -15.5t11 -9.5t19.5 -5t30.5 2.5 t42 8q57 20 91 34t87.5 44.5t87 64t65.5 88.5t47 122q38 172 -44.5 341.5t-246.5 278.5q22 -44 43 -129q39 -159 -32 -154q-15 2 -33 9q-79 33 -120.5 100t-44 175.5t48.5 257.5q-13 -8 -34 -23.5t-72.5 -66.5t-88.5 -105.5t-60 -138t-8 -166.5q2 -12 8 -41.5t8 -43t6 -39.5 t3.5 -39.5t-1 -33.5t-6 -31.5t-13.5 -24t-21 -20.5t-31 -12q-38 -10 -67 13t-40.5 61.5t-15 81.5t10.5 75q-52 -46 -83.5 -101t-39 -107t-7.5 -85z" />
-<glyph unicode="&#xe105;" d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5t145.5 -23.5t132.5 -59t116.5 -83.5t97 -90t74.5 -85.5t49 -63.5t20 -30l26 -40l-26 -40q-6 -10 -20 -30t-49 -63.5t-74.5 -85.5t-97 -90t-116.5 -83.5t-132.5 -59t-145.5 -23.5 t-145.5 23.5t-132.5 59t-116.5 83.5t-97 90t-74.5 85.5t-49 63.5t-20 30zM120 600q7 -10 40.5 -58t56 -78.5t68 -77.5t87.5 -75t103 -49.5t125 -21.5t123.5 20t100.5 45.5t85.5 71.5t66.5 75.5t58 81.5t47 66q-1 1 -28.5 37.5t-42 55t-43.5 53t-57.5 63.5t-58.5 54 q49 -74 49 -163q0 -124 -88 -212t-212 -88t-212 88t-88 212q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l105 105q-37 24 -75 72t-57 84l-20 36z" />
-<glyph unicode="&#xe106;" d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5q61 0 121 -17l37 142h148l-314 -1200h-148l37 143q-82 21 -165 71.5t-140 102t-109.5 112t-72 88.5t-29.5 43zM120 600q210 -282 393 -336l37 141q-107 18 -178.5 101.5t-71.5 193.5 q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l47 47l23 87q-30 28 -59 69t-44 68l-14 26zM780 161l38 145q22 15 44.5 34t46 44t40.5 44t41 50.5t33.5 43.5t33 44t24.5 34q-97 127 -140 175l39 146q67 -54 131.5 -125.5t87.5 -103.5t36 -52l26 -40l-26 -40 q-7 -12 -25.5 -38t-63.5 -79.5t-95.5 -102.5t-124 -100t-146.5 -79z" />
-<glyph unicode="&#xe107;" d="M-97.5 34q13.5 -34 50.5 -34h1294q37 0 50.5 35.5t-7.5 67.5l-642 1056q-20 33 -48 36t-48 -29l-642 -1066q-21 -32 -7.5 -66zM155 200l445 723l445 -723h-345v100h-200v-100h-345zM500 600l100 -300l100 300v100h-200v-100z" />
-<glyph unicode="&#xe108;" d="M100 262v41q0 20 11 44.5t26 38.5l363 325v339q0 62 44 106t106 44t106 -44t44 -106v-339l363 -325q15 -14 26 -38.5t11 -44.5v-41q0 -20 -12 -26.5t-29 5.5l-359 249v-263q100 -91 100 -113v-64q0 -21 -13 -29t-32 1l-94 78h-222l-94 -78q-19 -9 -32 -1t-13 29v64 q0 22 100 113v263l-359 -249q-17 -12 -29 -5.5t-12 26.5z" />
-<glyph unicode="&#xe109;" d="M0 50q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v750h-1100v-750zM0 900h1100v150q0 21 -14.5 35.5t-35.5 14.5h-150v100h-100v-100h-500v100h-100v-100h-150q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 100v100h100v-100h-100zM100 300v100h100v-100h-100z M100 500v100h100v-100h-100zM300 100v100h100v-100h-100zM300 300v100h100v-100h-100zM300 500v100h100v-100h-100zM500 100v100h100v-100h-100zM500 300v100h100v-100h-100zM500 500v100h100v-100h-100zM700 100v100h100v-100h-100zM700 300v100h100v-100h-100zM700 500 v100h100v-100h-100zM900 100v100h100v-100h-100zM900 300v100h100v-100h-100zM900 500v100h100v-100h-100z" />
-<glyph unicode="&#xe110;" d="M0 200v200h259l600 600h241v198l300 -295l-300 -300v197h-159l-600 -600h-341zM0 800h259l122 -122l141 142l-181 180h-341v-200zM678 381l141 142l122 -123h159v198l300 -295l-300 -300v197h-241z" />
-<glyph unicode="&#xe111;" d="M0 400v600q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-596l-304 -300v300h-100q-41 0 -70.5 29.5t-29.5 70.5z" />
-<glyph unicode="&#xe112;" d="M100 600v200h300v-250q0 -113 6 -145q17 -92 102 -117q39 -11 92 -11q37 0 66.5 5.5t50 15.5t36 24t24 31.5t14 37.5t7 42t2.5 45t0 47v25v250h300v-200q0 -42 -3 -83t-15 -104t-31.5 -116t-58 -109.5t-89 -96.5t-129 -65.5t-174.5 -25.5t-174.5 25.5t-129 65.5t-89 96.5 t-58 109.5t-31.5 116t-15 104t-3 83zM100 900v300h300v-300h-300zM800 900v300h300v-300h-300z" />
-<glyph unicode="&#xe113;" d="M-30 411l227 -227l352 353l353 -353l226 227l-578 579z" />
-<glyph unicode="&#xe114;" d="M70 797l580 -579l578 579l-226 227l-353 -353l-352 353z" />
-<glyph unicode="&#xe115;" d="M-198 700l299 283l300 -283h-203v-400h385l215 -200h-800v600h-196zM402 1000l215 -200h381v-400h-198l299 -283l299 283h-200v600h-796z" />
-<glyph unicode="&#xe116;" d="M18 939q-5 24 10 42q14 19 39 19h896l38 162q5 17 18.5 27.5t30.5 10.5h94q20 0 35 -14.5t15 -35.5t-15 -35.5t-35 -14.5h-54l-201 -961q-2 -4 -6 -10.5t-19 -17.5t-33 -11h-31v-50q0 -20 -14.5 -35t-35.5 -15t-35.5 15t-14.5 35v50h-300v-50q0 -20 -14.5 -35t-35.5 -15 t-35.5 15t-14.5 35v50h-50q-21 0 -35.5 15t-14.5 35q0 21 14.5 35.5t35.5 14.5h535l48 200h-633q-32 0 -54.5 21t-27.5 43z" />
-<glyph unicode="&#xe117;" d="M0 0v800h1200v-800h-1200zM0 900v100h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-100h-1200z" />
-<glyph unicode="&#xe118;" d="M1 0l300 700h1200l-300 -700h-1200zM1 400v600h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-200h-1000z" />
-<glyph unicode="&#xe119;" d="M302 300h198v600h-198l298 300l298 -300h-198v-600h198l-298 -300z" />
-<glyph unicode="&#xe120;" d="M0 600l300 298v-198h600v198l300 -298l-300 -297v197h-600v-197z" />
-<glyph unicode="&#xe121;" d="M0 100v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM31 400l172 739q5 22 23 41.5t38 19.5h672q19 0 37.5 -22.5t23.5 -45.5l172 -732h-1138zM800 100h100v100h-100v-100z M1000 100h100v100h-100v-100z" />
-<glyph unicode="&#xe122;" d="M-101 600v50q0 24 25 49t50 38l25 13v-250l-11 5.5t-24 14t-30 21.5t-24 27.5t-11 31.5zM99 500v250v5q0 13 0.5 18.5t2.5 13t8 10.5t15 3h200l675 250v-850l-675 200h-38l47 -276q2 -12 -3 -17.5t-11 -6t-21 -0.5h-8h-83q-20 0 -34.5 14t-18.5 35q-56 337 -56 351z M1100 200v850q0 21 14.5 35.5t35.5 14.5q20 0 35 -14.5t15 -35.5v-850q0 -20 -15 -35t-35 -15q-21 0 -35.5 15t-14.5 35z" />
-<glyph unicode="&#xe123;" d="M74 350q0 21 13.5 35.5t33.5 14.5h17l118 173l63 327q15 77 76 140t144 83l-18 32q-6 19 3 32t29 13h94q20 0 29 -10.5t3 -29.5l-18 -37q83 -19 144 -82.5t76 -140.5l63 -327l118 -173h17q20 0 33.5 -14.5t13.5 -35.5q0 -20 -13 -40t-31 -27q-22 -9 -63 -23t-167.5 -37 t-251.5 -23t-245.5 20.5t-178.5 41.5l-58 20q-18 7 -31 27.5t-13 40.5zM497 110q12 -49 40 -79.5t63 -30.5t63 30.5t39 79.5q-48 -6 -102 -6t-103 6z" />
-<glyph unicode="&#xe124;" d="M21 445l233 -45l-78 -224l224 78l45 -233l155 179l155 -179l45 233l224 -78l-78 224l234 45l-180 155l180 156l-234 44l78 225l-224 -78l-45 233l-155 -180l-155 180l-45 -233l-224 78l78 -225l-233 -44l179 -156z" />
-<glyph unicode="&#xe125;" d="M0 200h200v600h-200v-600zM300 275q0 -75 100 -75h61q123 -100 139 -100h250q46 0 83 57l238 344q29 31 29 74v100q0 44 -30.5 84.5t-69.5 40.5h-328q28 118 28 125v150q0 44 -30.5 84.5t-69.5 40.5h-50q-27 0 -51 -20t-38 -48l-96 -198l-145 -196q-20 -26 -20 -63v-400z M400 300v375l150 212l100 213h50v-175l-50 -225h450v-125l-250 -375h-214l-136 100h-100z" />
-<glyph unicode="&#xe126;" d="M0 400v600h200v-600h-200zM300 525v400q0 75 100 75h61q123 100 139 100h250q46 0 83 -57l238 -344q29 -31 29 -74v-100q0 -44 -30.5 -84.5t-69.5 -40.5h-328q28 -118 28 -125v-150q0 -44 -30.5 -84.5t-69.5 -40.5h-50q-27 0 -51 20t-38 48l-96 198l-145 196 q-20 26 -20 63zM400 525l150 -212l100 -213h50v175l-50 225h450v125l-250 375h-214l-136 -100h-100v-375z" />
-<glyph unicode="&#xe127;" d="M8 200v600h200v-600h-200zM308 275v525q0 17 14 35.5t28 28.5l14 9l362 230q14 6 25 6q17 0 29 -12l109 -112q14 -14 14 -34q0 -18 -11 -32l-85 -121h302q85 0 138.5 -38t53.5 -110t-54.5 -111t-138.5 -39h-107l-130 -339q-7 -22 -20.5 -41.5t-28.5 -19.5h-341 q-7 0 -90 81t-83 94zM408 289l100 -89h293l131 339q6 21 19.5 41t28.5 20h203q16 0 25 15t9 36q0 20 -9 34.5t-25 14.5h-457h-6.5h-7.5t-6.5 0.5t-6 1t-5 1.5t-5.5 2.5t-4 4t-4 5.5q-5 12 -5 20q0 14 10 27l147 183l-86 83l-339 -236v-503z" />
-<glyph unicode="&#xe128;" d="M-101 651q0 72 54 110t139 37h302l-85 121q-11 16 -11 32q0 21 14 34l109 113q13 12 29 12q11 0 25 -6l365 -230q7 -4 16.5 -10.5t26 -26t16.5 -36.5v-526q0 -13 -85.5 -93.5t-93.5 -80.5h-342q-15 0 -28.5 20t-19.5 41l-131 339h-106q-84 0 -139 39t-55 111zM-1 601h222 q15 0 28.5 -20.5t19.5 -40.5l131 -339h293l106 89v502l-342 237l-87 -83l145 -184q10 -11 10 -26q0 -11 -5 -20q-1 -3 -3.5 -5.5l-4 -4t-5 -2.5t-5.5 -1.5t-6.5 -1t-6.5 -0.5h-7.5h-6.5h-476v-100zM999 201v600h200v-600h-200z" />
-<glyph unicode="&#xe129;" d="M97 719l230 -363q4 -6 10.5 -15.5t26 -25t36.5 -15.5h525q13 0 94 83t81 90v342q0 15 -20 28.5t-41 19.5l-339 131v106q0 84 -39 139t-111 55t-110 -53.5t-38 -138.5v-302l-121 84q-15 12 -33.5 11.5t-32.5 -13.5l-112 -110q-22 -22 -6 -53zM172 739l83 86l183 -146 q22 -18 47 -5q3 1 5.5 3.5l4 4t2.5 5t1.5 5.5t1 6.5t0.5 6v7.5v7v456q0 22 25 31t50 -0.5t25 -30.5v-202q0 -16 20 -29.5t41 -19.5l339 -130v-294l-89 -100h-503zM400 0v200h600v-200h-600z" />
-<glyph unicode="&#xe130;" d="M1 585q-15 -31 7 -53l112 -110q13 -13 32 -13.5t34 10.5l121 85l-1 -302q0 -84 38.5 -138t110.5 -54t111 55t39 139v106l339 131q20 6 40.5 19.5t20.5 28.5v342q0 7 -81 90t-94 83h-525q-17 0 -35.5 -14t-28.5 -28l-10 -15zM76 565l237 339h503l89 -100v-294l-340 -130 q-20 -6 -40 -20t-20 -29v-202q0 -22 -25 -31t-50 0t-25 31v456v14.5t-1.5 11.5t-5 12t-9.5 7q-24 13 -46 -5l-184 -146zM305 1104v200h600v-200h-600z" />
-<glyph unicode="&#xe131;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q162 0 299.5 -80t217.5 -218t80 -300t-80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 500h300l-2 -194l402 294l-402 298v-197h-298v-201z" />
-<glyph unicode="&#xe132;" d="M0 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t231.5 47.5q122 0 232.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-218 -217.5t-300 -80t-299.5 80t-217.5 217.5t-80 299.5zM200 600l400 -294v194h302v201h-300v197z" />
-<glyph unicode="&#xe133;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600h200v-300h200v300h200l-300 400z" />
-<glyph unicode="&#xe134;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600l300 -400l300 400h-200v300h-200v-300h-200z" />
-<glyph unicode="&#xe135;" d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM254 780q-8 -34 5.5 -93t7.5 -87q0 -9 17 -44t16 -60q12 0 23 -5.5 t23 -15t20 -13.5q20 -10 108 -42q22 -8 53 -31.5t59.5 -38.5t57.5 -11q8 -18 -15 -55.5t-20 -57.5q12 -21 22.5 -34.5t28 -27t36.5 -17.5q0 -6 -3 -15.5t-3.5 -14.5t4.5 -17q101 -2 221 111q31 30 47 48t34 49t21 62q-14 9 -37.5 9.5t-35.5 7.5q-14 7 -49 15t-52 19 q-9 0 -39.5 -0.5t-46.5 -1.5t-39 -6.5t-39 -16.5q-50 -35 -66 -12q-4 2 -3.5 25.5t0.5 25.5q-6 13 -26.5 17t-24.5 7q2 22 -2 41t-16.5 28t-38.5 -20q-23 -25 -42 4q-19 28 -8 58q8 16 22 22q6 -1 26 -1.5t33.5 -4.5t19.5 -13q12 -19 32 -37.5t34 -27.5l14 -8q0 3 9.5 39.5 t5.5 57.5q-4 23 14.5 44.5t22.5 31.5q5 14 10 35t8.5 31t15.5 22.5t34 21.5q-6 18 10 37q8 0 23.5 -1.5t24.5 -1.5t20.5 4.5t20.5 15.5q-10 23 -30.5 42.5t-38 30t-49 26.5t-43.5 23q11 41 1 44q31 -13 58.5 -14.5t39.5 3.5l11 4q6 36 -17 53.5t-64 28.5t-56 23 q-19 -3 -37 0q-15 -12 -36.5 -21t-34.5 -12t-44 -8t-39 -6q-15 -3 -46 0t-45 -3q-20 -6 -51.5 -25.5t-34.5 -34.5q-3 -11 6.5 -22.5t8.5 -18.5q-3 -34 -27.5 -91t-29.5 -79zM518 915q3 12 16 30.5t16 25.5q10 -10 18.5 -10t14 6t14.5 14.5t16 12.5q0 -18 8 -42.5t16.5 -44 t9.5 -23.5q-6 1 -39 5t-53.5 10t-36.5 16z" />
-<glyph unicode="&#xe136;" d="M0 164.5q0 21.5 15 37.5l600 599q-33 101 6 201.5t135 154.5q164 92 306 -9l-259 -138l145 -232l251 126q13 -175 -151 -267q-123 -70 -253 -23l-596 -596q-15 -16 -36.5 -16t-36.5 16l-111 110q-15 15 -15 36.5z" />
-<glyph unicode="&#xe137;" horiz-adv-x="1220" d="M0 196v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 596v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000 q-41 0 -70.5 29.5t-29.5 70.5zM0 996v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM600 596h500v100h-500v-100zM800 196h300v100h-300v-100zM900 996h200v100h-200v-100z" />
-<glyph unicode="&#xe138;" d="M100 1100v100h1000v-100h-1000zM150 1000h900l-350 -500v-300l-200 -200v500z" />
-<glyph unicode="&#xe139;" d="M0 200v200h1200v-200q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 500v400q0 41 29.5 70.5t70.5 29.5h300v100q0 41 29.5 70.5t70.5 29.5h200q41 0 70.5 -29.5t29.5 -70.5v-100h300q41 0 70.5 -29.5t29.5 -70.5v-400h-500v100h-200v-100h-500z M500 1000h200v100h-200v-100z" />
-<glyph unicode="&#xe140;" d="M0 0v400l129 -129l200 200l142 -142l-200 -200l129 -129h-400zM0 800l129 129l200 -200l142 142l-200 200l129 129h-400v-400zM729 329l142 142l200 -200l129 129v-400h-400l129 129zM729 871l200 200l-129 129h400v-400l-129 129l-200 -200z" />
-<glyph unicode="&#xe141;" d="M0 596q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 596q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM291 655 q0 23 15.5 38.5t38.5 15.5t39 -16t16 -38q0 -23 -16 -39t-39 -16q-22 0 -38 16t-16 39zM400 850q0 22 16 38.5t39 16.5q22 0 38 -16t16 -39t-16 -39t-38 -16q-23 0 -39 16.5t-16 38.5zM513 609q0 32 21 56.5t52 29.5l122 126l1 1q-9 14 -9 28q0 22 16 38.5t39 16.5 q22 0 38 -16t16 -39t-16 -39t-38 -16q-16 0 -29 10l-55 -145q17 -22 17 -51q0 -36 -25.5 -61.5t-61.5 -25.5q-37 0 -62.5 25.5t-25.5 61.5zM800 655q0 22 16 38t39 16t38.5 -15.5t15.5 -38.5t-16 -39t-38 -16q-23 0 -39 16t-16 39z" />
-<glyph unicode="&#xe142;" d="M-40 375q-13 -95 35 -173q35 -57 94 -89t129 -32q63 0 119 28q33 16 65 40.5t52.5 45.5t59.5 64q40 44 57 61l394 394q35 35 47 84t-3 96q-27 87 -117 104q-20 2 -29 2q-46 0 -79.5 -17t-67.5 -51l-388 -396l-7 -7l69 -67l377 373q20 22 39 38q23 23 50 23q38 0 53 -36 q16 -39 -20 -75l-547 -547q-52 -52 -125 -52q-55 0 -100 33t-54 96q-5 35 2.5 66t31.5 63t42 50t56 54q24 21 44 41l348 348q52 52 82.5 79.5t84 54t107.5 26.5q25 0 48 -4q95 -17 154 -94.5t51 -175.5q-7 -101 -98 -192l-252 -249l-253 -256l7 -7l69 -60l517 511 q67 67 95 157t11 183q-16 87 -67 154t-130 103q-69 33 -152 33q-107 0 -197 -55q-40 -24 -111 -95l-512 -512q-68 -68 -81 -163z" />
-<glyph unicode="&#xe143;" d="M79 784q0 131 99 229.5t230 98.5q144 0 242 -129q103 129 245 129q130 0 227 -98.5t97 -229.5q0 -46 -17.5 -91t-61 -99t-77 -89.5t-104.5 -105.5q-197 -191 -293 -322l-17 -23l-16 23q-43 58 -100 122.5t-92 99.5t-101 100l-84.5 84.5t-68 74t-60 78t-33.5 70.5t-15 78z M250 784q0 -27 30.5 -70t61.5 -75.5t95 -94.5l22 -22q93 -90 190 -201q82 92 195 203l12 12q64 62 97.5 97t64.5 79t31 72q0 71 -48 119.5t-106 48.5q-73 0 -131 -83l-118 -171l-114 174q-51 80 -124 80q-59 0 -108.5 -49.5t-49.5 -118.5z" />
-<glyph unicode="&#xe144;" d="M57 353q0 -94 66 -160l141 -141q66 -66 159 -66q95 0 159 66l283 283q66 66 66 159t-66 159l-141 141q-12 12 -19 17l-105 -105l212 -212l-389 -389l-247 248l95 95l-18 18q-46 45 -75 101l-55 -55q-66 -66 -66 -159zM269 706q0 -93 66 -159l141 -141l19 -17l105 105 l-212 212l389 389l247 -247l-95 -96l18 -18q46 -46 77 -99l29 29q35 35 62.5 88t27.5 96q0 93 -66 159l-141 141q-66 66 -159 66q-95 0 -159 -66l-283 -283q-66 -64 -66 -159z" />
-<glyph unicode="&#xe145;" d="M200 100v953q0 21 30 46t81 48t129 38t163 15t162 -15t127 -38t79 -48t29 -46v-953q0 -41 -29.5 -70.5t-70.5 -29.5h-600q-41 0 -70.5 29.5t-29.5 70.5zM300 300h600v700h-600v-700zM496 150q0 -43 30.5 -73.5t73.5 -30.5t73.5 30.5t30.5 73.5t-30.5 73.5t-73.5 30.5 t-73.5 -30.5t-30.5 -73.5z" />
-<glyph unicode="&#xe146;" d="M0 0l303 380l207 208l-210 212h300l267 279l-35 36q-15 14 -15 35t15 35q14 15 35 15t35 -15l283 -282q15 -15 15 -36t-15 -35q-14 -15 -35 -15t-35 15l-36 35l-279 -267v-300l-212 210l-208 -207z" />
-<glyph unicode="&#xe148;" d="M295 433h139q5 -77 48.5 -126.5t117.5 -64.5v335l-27 7q-46 14 -79 26.5t-72 36t-62.5 52t-40 72.5t-16.5 99q0 92 44 159.5t109 101t144 40.5v78h100v-79q38 -4 72.5 -13.5t75.5 -31.5t71 -53.5t51.5 -84t24.5 -118.5h-159q-8 72 -35 109.5t-101 50.5v-307l64 -14 q34 -7 64 -16.5t70 -31.5t67.5 -52t47.5 -80.5t20 -112.5q0 -139 -89 -224t-244 -96v-77h-100v78q-152 17 -237 104q-40 40 -52.5 93.5t-15.5 139.5zM466 889q0 -29 8 -51t16.5 -34t29.5 -22.5t31 -13.5t38 -10q7 -2 11 -3v274q-61 -8 -97.5 -37.5t-36.5 -102.5zM700 237 q170 18 170 151q0 64 -44 99.5t-126 60.5v-311z" />
-<glyph unicode="&#xe149;" d="M100 600v100h166q-24 49 -44 104q-10 26 -14.5 55.5t-3 72.5t25 90t68.5 87q97 88 263 88q129 0 230 -89t101 -208h-153q0 52 -34 89.5t-74 51.5t-76 14q-37 0 -79 -14.5t-62 -35.5q-41 -44 -41 -101q0 -11 2.5 -24.5t5.5 -24t9.5 -26.5t10.5 -25t14 -27.5t14 -25.5 t15.5 -27t13.5 -24h242v-100h-197q8 -50 -2.5 -115t-31.5 -94q-41 -59 -99 -113q35 11 84 18t70 7q32 1 102 -16t104 -17q76 0 136 30l50 -147q-41 -25 -80.5 -36.5t-59 -13t-61.5 -1.5q-23 0 -128 33t-155 29q-39 -4 -82 -17t-66 -25l-24 -11l-55 145l16.5 11t15.5 10 t13.5 9.5t14.5 12t14.5 14t17.5 18.5q48 55 54 126.5t-30 142.5h-221z" />
-<glyph unicode="&#xe150;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM602 900l298 300l298 -300h-198v-900h-200v900h-198z" />
-<glyph unicode="&#xe151;" d="M2 300h198v900h200v-900h198l-298 -300zM700 0v200h100v-100h200v-100h-300zM700 400v100h300v-200h-99v-100h-100v100h99v100h-200zM700 700v500h300v-500h-100v100h-100v-100h-100zM801 900h100v200h-100v-200z" />
-<glyph unicode="&#xe152;" d="M2 300h198v900h200v-900h198l-298 -300zM700 0v500h300v-500h-100v100h-100v-100h-100zM700 700v200h100v-100h200v-100h-300zM700 1100v100h300v-200h-99v-100h-100v100h99v100h-200zM801 200h100v200h-100v-200z" />
-<glyph unicode="&#xe153;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 100v400h300v-500h-100v100h-200zM800 1100v100h200v-500h-100v400h-100zM901 200h100v200h-100v-200z" />
-<glyph unicode="&#xe154;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 400v100h200v-500h-100v400h-100zM800 800v400h300v-500h-100v100h-200zM901 900h100v200h-100v-200z" />
-<glyph unicode="&#xe155;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h500v-200h-500zM700 400v200h400v-200h-400zM700 700v200h300v-200h-300zM700 1000v200h200v-200h-200z" />
-<glyph unicode="&#xe156;" d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h200v-200h-200zM700 400v200h300v-200h-300zM700 700v200h400v-200h-400zM700 1000v200h500v-200h-500z" />
-<glyph unicode="&#xe157;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q162 0 281 -118.5t119 -281.5v-300q0 -165 -118.5 -282.5t-281.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500z" />
-<glyph unicode="&#xe158;" d="M0 400v300q0 163 119 281.5t281 118.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-163 0 -281.5 117.5t-118.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM400 300l333 250l-333 250v-500z" />
-<glyph unicode="&#xe159;" d="M0 400v300q0 163 117.5 281.5t282.5 118.5h300q163 0 281.5 -119t118.5 -281v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 700l250 -333l250 333h-500z" />
-<glyph unicode="&#xe160;" d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -162 -118.5 -281t-281.5 -119h-300q-165 0 -282.5 118.5t-117.5 281.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 400h500l-250 333z" />
-<glyph unicode="&#xe161;" d="M0 400v300h300v200l400 -350l-400 -350v200h-300zM500 0v200h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-500v200h400q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-400z" />
-<glyph unicode="&#xe162;" d="M216 519q10 -19 32 -19h302q-155 -438 -160 -458q-5 -21 4 -32l9 -8l9 -1q13 0 26 16l538 630q15 19 6 36q-8 18 -32 16h-300q1 4 78 219.5t79 227.5q2 17 -6 27l-8 8h-9q-16 0 -25 -15q-4 -5 -98.5 -111.5t-228 -257t-209.5 -238.5q-17 -19 -7 -40z" />
-<glyph unicode="&#xe163;" d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q47 0 100 15v185h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h500v185q-14 4 -114 7.5t-193 5.5l-93 2q-165 0 -282.5 -117.5t-117.5 -282.5v-300zM600 400v300h300v200l400 -350l-400 -350v200h-300z " />
-<glyph unicode="&#xe164;" d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q163 0 281.5 117.5t118.5 282.5v98l-78 73l-122 -123v-148q0 -41 -29.5 -70.5t-70.5 -29.5h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h156l118 122l-74 78h-100q-165 0 -282.5 -117.5t-117.5 -282.5 v-300zM496 709l353 342l-149 149h500v-500l-149 149l-342 -353z" />
-<glyph unicode="&#xe165;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM406 600 q0 80 57 137t137 57t137 -57t57 -137t-57 -137t-137 -57t-137 57t-57 137z" />
-<glyph unicode="&#xe166;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 800l445 -500l450 500h-295v400h-300v-400h-300zM900 150h100v50h-100v-50z" />
-<glyph unicode="&#xe167;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 700h300v-300h300v300h295l-445 500zM900 150h100v50h-100v-50z" />
-<glyph unicode="&#xe168;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 705l305 -305l596 596l-154 155l-442 -442l-150 151zM900 150h100v50h-100v-50z" />
-<glyph unicode="&#xe169;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 988l97 -98l212 213l-97 97zM200 401h700v699l-250 -239l-149 149l-212 -212l149 -149zM900 150h100v50h-100v-50z" />
-<glyph unicode="&#xe170;" d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM200 612l212 -212l98 97l-213 212zM300 1200l239 -250l-149 -149l212 -212l149 148l248 -237v700h-699zM900 150h100v50h-100v-50z" />
-<glyph unicode="&#xe171;" d="M23 415l1177 784v-1079l-475 272l-310 -393v416h-392zM494 210l672 938l-672 -712v-226z" />
-<glyph unicode="&#xe172;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-850q0 -21 -15 -35.5t-35 -14.5h-150v400h-700v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200z" />
-<glyph unicode="&#xe173;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-218l-276 -275l-120 120l-126 -127h-378v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM581 306l123 123l120 -120l353 352l123 -123l-475 -476zM600 1000h100v200h-100v-200z" />
-<glyph unicode="&#xe174;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-269l-103 -103l-170 170l-298 -298h-329v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200zM700 133l170 170l-170 170l127 127l170 -170l170 170l127 -128l-170 -169l170 -170 l-127 -127l-170 170l-170 -170z" />
-<glyph unicode="&#xe175;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-300h-400v-200h-500v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300l300 -300l300 300h-200v300h-200v-300h-200zM600 1000v200h100v-200h-100z" />
-<glyph unicode="&#xe176;" d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-402l-200 200l-298 -298h-402v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300h200v-300h200v300h200l-300 300zM600 1000v200h100v-200h-100z" />
-<glyph unicode="&#xe177;" d="M0 250q0 -21 14.5 -35.5t35.5 -14.5h1100q21 0 35.5 14.5t14.5 35.5v550h-1200v-550zM0 900h1200v150q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 300v200h400v-200h-400z" />
-<glyph unicode="&#xe178;" d="M0 400l300 298v-198h400v-200h-400v-198zM100 800v200h100v-200h-100zM300 800v200h100v-200h-100zM500 800v200h400v198l300 -298l-300 -298v198h-400zM800 300v200h100v-200h-100zM1000 300h100v200h-100v-200z" />
-<glyph unicode="&#xe179;" d="M100 700v400l50 100l50 -100v-300h100v300l50 100l50 -100v-300h100v300l50 100l50 -100v-400l-100 -203v-447q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5v447zM800 597q0 -29 10.5 -55.5t25 -43t29 -28.5t25.5 -18l10 -5v-397q0 -21 14.5 -35.5 t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v1106q0 31 -18 40.5t-44 -7.5l-276 -117q-25 -16 -43.5 -50.5t-18.5 -65.5v-359z" />
-<glyph unicode="&#xe180;" d="M100 0h400v56q-75 0 -87.5 6t-12.5 44v394h500v-394q0 -38 -12.5 -44t-87.5 -6v-56h400v56q-4 0 -11 0.5t-24 3t-30 7t-24 15t-11 24.5v888q0 22 25 34.5t50 13.5l25 2v56h-400v-56q75 0 87.5 -6t12.5 -44v-394h-500v394q0 38 12.5 44t87.5 6v56h-400v-56q4 0 11 -0.5 t24 -3t30 -7t24 -15t11 -24.5v-888q0 -22 -25 -34.5t-50 -13.5l-25 -2v-56z" />
-<glyph unicode="&#xe181;" d="M0 300q0 -41 29.5 -70.5t70.5 -29.5h300q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-300q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM100 100h400l200 200h105l295 98v-298h-425l-100 -100h-375zM100 300v200h300v-200h-300zM100 600v200h300v-200h-300z M100 1000h400l200 -200v-98l295 98h105v200h-425l-100 100h-375zM700 402v163l400 133v-163z" />
-<glyph unicode="&#xe182;" d="M16.5 974.5q0.5 -21.5 16 -90t46.5 -140t104 -177.5t175 -208q103 -103 207.5 -176t180 -103.5t137 -47t92.5 -16.5l31 1l163 162q16 17 13 40.5t-22 37.5l-192 136q-19 14 -45 12t-42 -19l-119 -118q-143 103 -267 227q-126 126 -227 268l118 118q17 17 20 41.5 t-11 44.5l-139 194q-14 19 -36.5 22t-40.5 -14l-162 -162q-1 -11 -0.5 -32.5z" />
-<glyph unicode="&#xe183;" d="M0 50v212q0 20 10.5 45.5t24.5 39.5l365 303v50q0 4 1 10.5t12 22.5t30 28.5t60 23t97 10.5t97 -10t60 -23.5t30 -27.5t12 -24l1 -10v-50l365 -303q14 -14 24.5 -39.5t10.5 -45.5v-212q0 -21 -15 -35.5t-35 -14.5h-1100q-21 0 -35.5 14.5t-14.5 35.5zM0 712 q0 -21 14.5 -33.5t34.5 -8.5l202 33q20 4 34.5 21t14.5 38v146q141 24 300 24t300 -24v-146q0 -21 14.5 -38t34.5 -21l202 -33q20 -4 34.5 8.5t14.5 33.5v200q-6 8 -19 20.5t-63 45t-112 57t-171 45t-235 20.5q-92 0 -175 -10.5t-141.5 -27t-108.5 -36.5t-81.5 -40 t-53.5 -36.5t-31 -27.5l-9 -10v-200z" />
-<glyph unicode="&#xe184;" d="M100 0v100h1100v-100h-1100zM175 200h950l-125 150v250l100 100v400h-100v-200h-100v200h-200v-200h-100v200h-200v-200h-100v200h-100v-400l100 -100v-250z" />
-<glyph unicode="&#xe185;" d="M100 0h300v400q0 41 -29.5 70.5t-70.5 29.5h-100q-41 0 -70.5 -29.5t-29.5 -70.5v-400zM500 0v1000q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-1000h-300zM900 0v700q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-700h-300z" />
-<glyph unicode="&#xe186;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z" />
-<glyph unicode="&#xe187;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h100v200h100v-200h100v500h-100v-200h-100v200h-100v-500zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z" />
-<glyph unicode="&#xe188;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v100h-200v300h200v100h-300v-500zM600 300h300v100h-200v300h200v100h-300v-500z" />
-<glyph unicode="&#xe189;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 550l300 -150v300zM600 400l300 150l-300 150v-300z" />
-<glyph unicode="&#xe190;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300v500h700v-500h-700zM300 400h130q41 0 68 42t27 107t-28.5 108t-66.5 43h-130v-300zM575 549 q0 -65 27 -107t68 -42h130v300h-130q-38 0 -66.5 -43t-28.5 -108z" />
-<glyph unicode="&#xe191;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z" />
-<glyph unicode="&#xe192;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v400h-200v100h-100v-500zM301 400v200h100v-200h-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z" />
-<glyph unicode="&#xe193;" d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 700v100h300v-300h-99v-100h-100v100h99v200h-200zM201 300v100h100v-100h-100zM601 300v100h100v-100h-100z M700 700v100h200v-500h-100v400h-100z" />
-<glyph unicode="&#xe194;" d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 500v200 l100 100h300v-100h-300v-200h300v-100h-300z" />
-<glyph unicode="&#xe195;" d="M0 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 400v400h300 l100 -100v-100h-100v100h-200v-100h200v-100h-200v-100h-100zM700 400v100h100v-100h-100z" />
-<glyph unicode="&#xe197;" d="M-14 494q0 -80 56.5 -137t135.5 -57h222v300h400v-300h128q120 0 205 86t85 208q0 120 -85 206.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200h200v300h200v-300 h200l-300 -300z" />
-<glyph unicode="&#xe198;" d="M-14 494q0 -80 56.5 -137t135.5 -57h8l414 414l403 -403q94 26 154.5 104t60.5 178q0 121 -85 207.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200l300 300 l300 -300h-200v-300h-200v300h-200z" />
-<glyph unicode="&#xe199;" d="M100 200h400v-155l-75 -45h350l-75 45v155h400l-270 300h170l-270 300h170l-300 333l-300 -333h170l-270 -300h170z" />
-<glyph unicode="&#xe200;" d="M121 700q0 -53 28.5 -97t75.5 -65q-4 -16 -4 -38q0 -74 52.5 -126.5t126.5 -52.5q56 0 100 30v-306l-75 -45h350l-75 45v306q46 -30 100 -30q74 0 126.5 52.5t52.5 126.5q0 24 -9 55q50 32 79.5 83t29.5 112q0 90 -61.5 155.5t-150.5 71.5q-26 89 -99.5 145.5 t-167.5 56.5q-116 0 -197.5 -81.5t-81.5 -197.5q0 -4 1 -12t1 -11q-14 2 -23 2q-74 0 -126.5 -52.5t-52.5 -126.5z" />
-</font>
-</defs></svg> 
\ No newline at end of file
+  <metadata></metadata>
+  <defs>
+    <font horiz-adv-x="1200" id="glyphicons_halflingsregular">
+      <font-face ascent="960" descent="-240" units-per-em="1200"/>
+      <missing-glyph horiz-adv-x="500"/>
+      <glyph/>
+      <glyph/>
+      <glyph unicode="&#xd;"/>
+      <glyph unicode=" "/>
+      <glyph
+        d="M100 500v200h259l-183 183l141 141l183 -183v259h200v-259l183 183l141 -141l-183 -183h259v-200h-259l183 -183l-141 -141l-183 183v-259h-200v259l-183 -183l-141 141l183 183h-259z"
+        unicode="*"/>
+      <glyph d="M0 400v300h400v400h300v-400h400v-300h-400v-400h-300v400h-400z" unicode="+"/>
+      <glyph unicode="&#xa0;"/>
+      <glyph horiz-adv-x="652" unicode="&#x2000;"/>
+      <glyph horiz-adv-x="1304" unicode="&#x2001;"/>
+      <glyph horiz-adv-x="652" unicode="&#x2002;"/>
+      <glyph horiz-adv-x="1304" unicode="&#x2003;"/>
+      <glyph horiz-adv-x="434" unicode="&#x2004;"/>
+      <glyph horiz-adv-x="326" unicode="&#x2005;"/>
+      <glyph horiz-adv-x="217" unicode="&#x2006;"/>
+      <glyph horiz-adv-x="217" unicode="&#x2007;"/>
+      <glyph horiz-adv-x="163" unicode="&#x2008;"/>
+      <glyph horiz-adv-x="260" unicode="&#x2009;"/>
+      <glyph horiz-adv-x="72" unicode="&#x200a;"/>
+      <glyph horiz-adv-x="260" unicode="&#x202f;"/>
+      <glyph horiz-adv-x="326" unicode="&#x205f;"/>
+      <glyph
+        d="M100 500l100 100h113q0 47 5 100h-218l100 100h135q37 167 112 257q117 141 297 141q242 0 354 -189q60 -103 66 -209h-181q0 55 -25.5 99t-63.5 68t-75 36.5t-67 12.5q-24 0 -52.5 -10t-62.5 -32t-65.5 -67t-50.5 -107h379l-100 -100h-300q-6 -46 -6 -100h406l-100 -100 h-300q9 -74 33 -132t52.5 -91t62 -54.5t59 -29t46.5 -7.5q29 0 66 13t75 37t63.5 67.5t25.5 96.5h174q-31 -172 -128 -278q-107 -117 -274 -117q-205 0 -324 158q-36 46 -69 131.5t-45 205.5h-217z"
+        unicode="&#x20ac;"/>
+      <glyph d="M200 400h900v300h-900v-300z" unicode="&#x2212;"/>
+      <glyph
+        d="M-14 494q0 -80 56.5 -137t135.5 -57h750q120 0 205 86t85 208q0 120 -85 206.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5z"
+        unicode="&#x2601;"/>
+      <glyph
+        d="M0 100l400 400l200 -200l200 200l400 -400h-1200zM0 300v600l300 -300zM0 1100l600 -603l600 603h-1200zM900 600l300 300v-600z"
+        unicode="&#x2709;"/>
+      <glyph
+        d="M-13 -13l333 112l-223 223zM187 403l214 -214l614 614l-214 214zM887 1103l214 -214l99 92q13 13 13 32.5t-13 33.5l-153 153q-15 13 -33 13t-33 -13z"
+        unicode="&#x270f;"/>
+      <glyph d="M0 0z" horiz-adv-x="500" unicode="&#xe000;"/>
+      <glyph d="M0 1200h1200l-500 -550v-550h300v-100h-800v100h300v550z" unicode="&#xe001;"/>
+      <glyph
+        d="M14 84q18 -55 86 -75.5t147 5.5q65 21 109 69t44 90v606l600 155v-521q-64 16 -138 -7q-79 -26 -122.5 -83t-25.5 -111q17 -55 85.5 -75.5t147.5 4.5q70 23 111.5 63.5t41.5 95.5v881q0 10 -7 15.5t-17 2.5l-752 -193q-10 -3 -17 -12.5t-7 -19.5v-689q-64 17 -138 -7 q-79 -25 -122.5 -82t-25.5 -112z"
+        unicode="&#xe002;"/>
+      <glyph
+        d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233z"
+        unicode="&#xe003;"/>
+      <glyph
+        d="M100 784q0 64 28 123t73 100.5t104.5 64t119 20.5t120 -38.5t104.5 -104.5q48 69 109.5 105t121.5 38t118.5 -20.5t102.5 -64t71 -100.5t27 -123q0 -57 -33.5 -117.5t-94 -124.5t-126.5 -127.5t-150 -152.5t-146 -174q-62 85 -145.5 174t-149.5 152.5t-126.5 127.5 t-94 124.5t-33.5 117.5z"
+        unicode="&#xe005;"/>
+      <glyph
+        d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1z"
+        unicode="&#xe006;"/>
+      <glyph
+        d="M-72 800h479l146 400h2l146 -400h472l-382 -278l145 -449l-384 275l-382 -275l146 447zM168 71l2 1zM237 700l196 -142l-73 -226l192 140l195 -141l-74 229l193 140h-235l-77 211l-78 -211h-239z"
+        unicode="&#xe007;"/>
+      <glyph
+        d="M0 0v143l400 257v100q-37 0 -68.5 74.5t-31.5 125.5v200q0 124 88 212t212 88t212 -88t88 -212v-200q0 -51 -31.5 -125.5t-68.5 -74.5v-100l400 -257v-143h-1200z"
+        unicode="&#xe008;"/>
+      <glyph
+        d="M0 0v1100h1200v-1100h-1200zM100 100h100v100h-100v-100zM100 300h100v100h-100v-100zM100 500h100v100h-100v-100zM100 700h100v100h-100v-100zM100 900h100v100h-100v-100zM300 100h600v400h-600v-400zM300 600h600v400h-600v-400zM1000 100h100v100h-100v-100z M1000 300h100v100h-100v-100zM1000 500h100v100h-100v-100zM1000 700h100v100h-100v-100zM1000 900h100v100h-100v-100z"
+        unicode="&#xe009;"/>
+      <glyph
+        d="M0 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM0 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400 q-21 0 -35.5 14.5t-14.5 35.5zM600 50v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5zM600 650v400q0 21 14.5 35.5t35.5 14.5h400q21 0 35.5 -14.5t14.5 -35.5v-400 q0 -21 -14.5 -35.5t-35.5 -14.5h-400q-21 0 -35.5 14.5t-14.5 35.5z"
+        unicode="&#xe010;"/>
+      <glyph
+        d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200 q-21 0 -35.5 14.5t-14.5 35.5zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 450v200q0 21 14.5 35.5t35.5 14.5h200 q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM800 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5z"
+        unicode="&#xe011;"/>
+      <glyph
+        d="M0 50v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM0 450q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v200q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5 t-14.5 -35.5v-200zM0 850v200q0 21 14.5 35.5t35.5 14.5h200q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5zM400 50v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5 t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 450v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5zM400 850v200q0 21 14.5 35.5t35.5 14.5h700q21 0 35.5 -14.5t14.5 -35.5 v-200q0 -21 -14.5 -35.5t-35.5 -14.5h-700q-21 0 -35.5 14.5t-14.5 35.5z"
+        unicode="&#xe012;"/>
+      <glyph d="M29 454l419 -420l818 820l-212 212l-607 -607l-206 207z" unicode="&#xe013;"/>
+      <glyph
+        d="M106 318l282 282l-282 282l212 212l282 -282l282 282l212 -212l-282 -282l282 -282l-212 -212l-282 282l-282 -282z"
+        unicode="&#xe014;"/>
+      <glyph
+        d="M23 693q0 200 142 342t342 142t342 -142t142 -342q0 -142 -78 -261l300 -300q7 -8 7 -18t-7 -18l-109 -109q-8 -7 -18 -7t-18 7l-300 300q-119 -78 -261 -78q-200 0 -342 142t-142 342zM176 693q0 -136 97 -233t234 -97t233.5 96.5t96.5 233.5t-96.5 233.5t-233.5 96.5 t-234 -97t-97 -233zM300 600v200h100v100h200v-100h100v-200h-100v-100h-200v100h-100z"
+        unicode="&#xe015;"/>
+      <glyph
+        d="M23 694q0 200 142 342t342 142t342 -142t142 -342q0 -141 -78 -262l300 -299q7 -7 7 -18t-7 -18l-109 -109q-8 -8 -18 -8t-18 8l-300 299q-120 -77 -261 -77q-200 0 -342 142t-142 342zM176 694q0 -136 97 -233t234 -97t233.5 97t96.5 233t-96.5 233t-233.5 97t-234 -97 t-97 -233zM300 601h400v200h-400v-200z"
+        unicode="&#xe016;"/>
+      <glyph
+        d="M23 600q0 183 105 331t272 210v-166q-103 -55 -165 -155t-62 -220q0 -177 125 -302t302 -125t302 125t125 302q0 120 -62 220t-165 155v166q167 -62 272 -210t105 -331q0 -118 -45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5 zM500 750q0 -21 14.5 -35.5t35.5 -14.5h100q21 0 35.5 14.5t14.5 35.5v400q0 21 -14.5 35.5t-35.5 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-400z"
+        unicode="&#xe017;"/>
+      <glyph
+        d="M100 1h200v300h-200v-300zM400 1v500h200v-500h-200zM700 1v800h200v-800h-200zM1000 1v1200h200v-1200h-200z"
+        unicode="&#xe018;"/>
+      <glyph
+        d="M26 601q0 -33 6 -74l151 -38l2 -6q14 -49 38 -93l3 -5l-80 -134q45 -59 105 -105l133 81l5 -3q45 -26 94 -39l5 -2l38 -151q40 -5 74 -5q27 0 74 5l38 151l6 2q46 13 93 39l5 3l134 -81q56 44 104 105l-80 134l3 5q24 44 39 93l1 6l152 38q5 40 5 74q0 28 -5 73l-152 38 l-1 6q-16 51 -39 93l-3 5l80 134q-44 58 -104 105l-134 -81l-5 3q-45 25 -93 39l-6 1l-38 152q-40 5 -74 5q-27 0 -74 -5l-38 -152l-5 -1q-50 -14 -94 -39l-5 -3l-133 81q-59 -47 -105 -105l80 -134l-3 -5q-25 -47 -38 -93l-2 -6l-151 -38q-6 -48 -6 -73zM385 601 q0 88 63 151t152 63t152 -63t63 -151q0 -89 -63 -152t-152 -63t-152 63t-63 152z"
+        unicode="&#xe019;"/>
+      <glyph
+        d="M100 1025v50q0 10 7.5 17.5t17.5 7.5h275v100q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5v-100h275q10 0 17.5 -7.5t7.5 -17.5v-50q0 -11 -7 -18t-18 -7h-1050q-11 0 -18 7t-7 18zM200 100v800h900v-800q0 -41 -29.5 -71t-70.5 -30h-700q-41 0 -70.5 30 t-29.5 71zM300 100h100v700h-100v-700zM500 100h100v700h-100v-700zM500 1100h300v100h-300v-100zM700 100h100v700h-100v-700zM900 100h100v700h-100v-700z"
+        unicode="&#xe020;"/>
+      <glyph d="M1 601l656 644l644 -644h-200v-600h-300v400h-300v-400h-300v600h-200z"
+        unicode="&#xe021;"/>
+      <glyph
+        d="M100 25v1150q0 11 7 18t18 7h475v-500h400v-675q0 -11 -7 -18t-18 -7h-850q-11 0 -18 7t-7 18zM700 800v300l300 -300h-300z"
+        unicode="&#xe022;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 500v400h100 v-300h200v-100h-300z"
+        unicode="&#xe023;"/>
+      <glyph
+        d="M-100 0l431 1200h209l-21 -300h162l-20 300h208l431 -1200h-538l-41 400h-242l-40 -400h-539zM488 500h224l-27 300h-170z"
+        unicode="&#xe024;"/>
+      <glyph
+        d="M0 0v400h490l-290 300h200v500h300v-500h200l-290 -300h490v-400h-1100zM813 200h175v100h-175v-100z"
+        unicode="&#xe025;"/>
+      <glyph
+        d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM188 600q0 -170 121 -291t291 -121t291 121t121 291t-121 291t-291 121 t-291 -121t-121 -291zM350 600h150v300h200v-300h150l-250 -300z"
+        unicode="&#xe026;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM350 600l250 300 l250 -300h-150v-300h-200v300h-150z"
+        unicode="&#xe027;"/>
+      <glyph
+        d="M0 25v475l200 700h800q199 -700 200 -700v-475q0 -11 -7 -18t-18 -7h-1150q-11 0 -18 7t-7 18zM200 500h200l50 -200h300l50 200h200l-97 500h-606z"
+        unicode="&#xe028;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM500 397v401 l297 -200z"
+        unicode="&#xe029;"/>
+      <glyph
+        d="M23 600q0 -118 45.5 -224.5t123 -184t184 -123t224.5 -45.5t224.5 45.5t184 123t123 184t45.5 224.5h-150q0 -177 -125 -302t-302 -125t-302 125t-125 302t125 302t302 125q136 0 246 -81l-146 -146h400v400l-145 -145q-157 122 -355 122q-118 0 -224.5 -45.5t-184 -123 t-123 -184t-45.5 -224.5z"
+        unicode="&#xe030;"/>
+      <glyph
+        d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5q198 0 355 -122l145 145v-400h-400l147 147q-112 80 -247 80q-177 0 -302 -125t-125 -302h-150zM100 0v400h400l-147 -147q112 -80 247 -80q177 0 302 125t125 302h150q0 -118 -45.5 -224.5t-123 -184t-184 -123 t-224.5 -45.5q-198 0 -355 122z"
+        unicode="&#xe031;"/>
+      <glyph
+        d="M100 0h1100v1200h-1100v-1200zM200 100v900h900v-900h-900zM300 200v100h100v-100h-100zM300 400v100h100v-100h-100zM300 600v100h100v-100h-100zM300 800v100h100v-100h-100zM500 200h500v100h-500v-100zM500 400v100h500v-100h-500zM500 600v100h500v-100h-500z M500 800v100h500v-100h-500z"
+        unicode="&#xe032;"/>
+      <glyph
+        d="M0 100v600q0 41 29.5 70.5t70.5 29.5h100v200q0 82 59 141t141 59h300q82 0 141 -59t59 -141v-200h100q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-900q-41 0 -70.5 29.5t-29.5 70.5zM400 800h300v150q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-150z"
+        unicode="&#xe033;"/>
+      <glyph
+        d="M100 0v1100h100v-1100h-100zM300 400q60 60 127.5 84t127.5 17.5t122 -23t119 -30t110 -11t103 42t91 120.5v500q-40 -81 -101.5 -115.5t-127.5 -29.5t-138 25t-139.5 40t-125.5 25t-103 -29.5t-65 -115.5v-500z"
+        unicode="&#xe034;"/>
+      <glyph
+        d="M0 275q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 127 70.5 231.5t184.5 161.5t245 57t245 -57t184.5 -161.5t70.5 -231.5v-300q0 -11 7 -18t18 -7h50q11 0 18 7t7 18v300q0 116 -49.5 227t-131 192.5t-192.5 131t-227 49.5t-227 -49.5t-192.5 -131t-131 -192.5 t-49.5 -227v-300zM200 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14zM800 20v460q0 8 6 14t14 6h160q8 0 14 -6t6 -14v-460q0 -8 -6 -14t-14 -6h-160q-8 0 -14 6t-6 14z"
+        unicode="&#xe035;"/>
+      <glyph
+        d="M0 400h300l300 -200v800l-300 -200h-300v-400zM688 459l141 141l-141 141l71 71l141 -141l141 141l71 -71l-141 -141l141 -141l-71 -71l-141 141l-141 -141z"
+        unicode="&#xe036;"/>
+      <glyph
+        d="M0 400h300l300 -200v800l-300 -200h-300v-400zM700 857l69 53q111 -135 111 -310q0 -169 -106 -302l-67 54q86 110 86 248q0 146 -93 257z"
+        unicode="&#xe037;"/>
+      <glyph
+        d="M0 401v400h300l300 200v-800l-300 200h-300zM702 858l69 53q111 -135 111 -310q0 -170 -106 -303l-67 55q86 110 86 248q0 145 -93 257zM889 951l7 -8q123 -151 123 -344q0 -189 -119 -339l-7 -8l81 -66l6 8q142 178 142 405q0 230 -144 408l-6 8z"
+        unicode="&#xe038;"/>
+      <glyph
+        d="M0 0h500v500h-200v100h-100v-100h-200v-500zM0 600h100v100h400v100h100v100h-100v300h-500v-600zM100 100v300h300v-300h-300zM100 800v300h300v-300h-300zM200 200v100h100v-100h-100zM200 900h100v100h-100v-100zM500 500v100h300v-300h200v-100h-100v-100h-200v100 h-100v100h100v200h-200zM600 0v100h100v-100h-100zM600 1000h100v-300h200v-300h300v200h-200v100h200v500h-600v-200zM800 800v300h300v-300h-300zM900 0v100h300v-100h-300zM900 900v100h100v-100h-100zM1100 200v100h100v-100h-100z"
+        unicode="&#xe039;"/>
+      <glyph
+        d="M0 200h100v1000h-100v-1000zM100 0v100h300v-100h-300zM200 200v1000h100v-1000h-100zM500 0v91h100v-91h-100zM500 200v1000h200v-1000h-200zM700 0v91h100v-91h-100zM800 200v1000h100v-1000h-100zM900 0v91h200v-91h-200zM1000 200v1000h200v-1000h-200z"
+        unicode="&#xe040;"/>
+      <glyph
+        d="M1 700v475q0 10 7.5 17.5t17.5 7.5h474l700 -700l-500 -500zM148 953q0 -42 29 -71q30 -30 71.5 -30t71.5 30q29 29 29 71t-29 71q-30 30 -71.5 30t-71.5 -30q-29 -29 -29 -71z"
+        unicode="&#xe041;"/>
+      <glyph
+        d="M2 700v475q0 11 7 18t18 7h474l700 -700l-500 -500zM148 953q0 -42 30 -71q29 -30 71 -30t71 30q30 29 30 71t-30 71q-29 30 -71 30t-71 -30q-30 -29 -30 -71zM701 1200h100l700 -700l-500 -500l-50 50l450 450z"
+        unicode="&#xe042;"/>
+      <glyph d="M100 0v1025l175 175h925v-1000l-100 -100v1000h-750l-100 -100h750v-1000h-900z"
+        unicode="&#xe043;"/>
+      <glyph
+        d="M200 0l450 444l450 -443v1150q0 20 -14.5 35t-35.5 15h-800q-21 0 -35.5 -15t-14.5 -35v-1151z"
+        unicode="&#xe044;"/>
+      <glyph
+        d="M0 100v700h200l100 -200h600l100 200h200v-700h-200v200h-800v-200h-200zM253 829l40 -124h592l62 124l-94 346q-2 11 -10 18t-18 7h-450q-10 0 -18 -7t-10 -18zM281 24l38 152q2 10 11.5 17t19.5 7h500q10 0 19.5 -7t11.5 -17l38 -152q2 -10 -3.5 -17t-15.5 -7h-600 q-10 0 -15.5 7t-3.5 17z"
+        unicode="&#xe045;"/>
+      <glyph
+        d="M0 200q0 -41 29.5 -70.5t70.5 -29.5h1000q41 0 70.5 29.5t29.5 70.5v600q0 41 -29.5 70.5t-70.5 29.5h-150q-4 8 -11.5 21.5t-33 48t-53 61t-69 48t-83.5 21.5h-200q-41 0 -82 -20.5t-70 -50t-52 -59t-34 -50.5l-12 -20h-150q-41 0 -70.5 -29.5t-29.5 -70.5v-600z M356 500q0 100 72 172t172 72t172 -72t72 -172t-72 -172t-172 -72t-172 72t-72 172zM494 500q0 -44 31 -75t75 -31t75 31t31 75t-31 75t-75 31t-75 -31t-31 -75zM900 700v100h100v-100h-100z"
+        unicode="&#xe046;"/>
+      <glyph
+        d="M53 0h365v66q-41 0 -72 11t-49 38t1 71l92 234h391l82 -222q16 -45 -5.5 -88.5t-74.5 -43.5v-66h417v66q-34 1 -74 43q-18 19 -33 42t-21 37l-6 13l-385 998h-93l-399 -1006q-24 -48 -52 -75q-12 -12 -33 -25t-36 -20l-15 -7v-66zM416 521l178 457l46 -140l116 -317h-340 z"
+        unicode="&#xe047;"/>
+      <glyph
+        d="M100 0v89q41 7 70.5 32.5t29.5 65.5v827q0 28 -1 39.5t-5.5 26t-15.5 21t-29 14t-49 14.5v70h471q120 0 213 -88t93 -228q0 -55 -11.5 -101.5t-28 -74t-33.5 -47.5t-28 -28l-12 -7q8 -3 21.5 -9t48 -31.5t60.5 -58t47.5 -91.5t21.5 -129q0 -84 -59 -156.5t-142 -111 t-162 -38.5h-500zM400 200h161q89 0 153 48.5t64 132.5q0 90 -62.5 154.5t-156.5 64.5h-159v-400zM400 700h139q76 0 130 61.5t54 138.5q0 82 -84 130.5t-239 48.5v-379z"
+        unicode="&#xe048;"/>
+      <glyph
+        d="M200 0v57q77 7 134.5 40.5t65.5 80.5l173 849q10 56 -10 74t-91 37q-6 1 -10.5 2.5t-9.5 2.5v57h425l2 -57q-33 -8 -62 -25.5t-46 -37t-29.5 -38t-17.5 -30.5l-5 -12l-128 -825q-10 -52 14 -82t95 -36v-57h-500z"
+        unicode="&#xe049;"/>
+      <glyph
+        d="M-75 200h75v800h-75l125 167l125 -167h-75v-800h75l-125 -167zM300 900v300h150h700h150v-300h-50q0 29 -8 48.5t-18.5 30t-33.5 15t-39.5 5.5t-50.5 1h-200v-850l100 -50v-100h-400v100l100 50v850h-200q-34 0 -50.5 -1t-40 -5.5t-33.5 -15t-18.5 -30t-8.5 -48.5h-49z "
+        unicode="&#xe050;"/>
+      <glyph
+        d="M33 51l167 125v-75h800v75l167 -125l-167 -125v75h-800v-75zM100 901v300h150h700h150v-300h-50q0 29 -8 48.5t-18 30t-33.5 15t-40 5.5t-50.5 1h-200v-650l100 -50v-100h-400v100l100 50v650h-200q-34 0 -50.5 -1t-39.5 -5.5t-33.5 -15t-18.5 -30t-8 -48.5h-50z"
+        unicode="&#xe051;"/>
+      <glyph
+        d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 350q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM0 650q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1000q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 950q0 -20 14.5 -35t35.5 -15h600q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-600q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z"
+        unicode="&#xe052;"/>
+      <glyph
+        d="M0 50q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM0 650q0 -20 14.5 -35t35.5 -15h1100q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5 v-100zM200 350q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM200 950q0 -20 14.5 -35t35.5 -15h700q21 0 35.5 15t14.5 35v100q0 21 -14.5 35.5t-35.5 14.5h-700q-21 0 -35.5 -14.5 t-14.5 -35.5v-100z"
+        unicode="&#xe053;"/>
+      <glyph
+        d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM100 650v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1000q-21 0 -35.5 15 t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM500 950v100q0 21 14.5 35.5t35.5 14.5h600q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-600 q-21 0 -35.5 15t-14.5 35z"
+        unicode="&#xe054;"/>
+      <glyph
+        d="M0 50v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h1100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-1100 q-21 0 -35.5 15t-14.5 35z"
+        unicode="&#xe055;"/>
+      <glyph
+        d="M0 50v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 350v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM0 650v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15t-14.5 35zM0 950v100q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-100q-21 0 -35.5 15 t-14.5 35zM300 50v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 350v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800 q-21 0 -35.5 15t-14.5 35zM300 650v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15h-800q-21 0 -35.5 15t-14.5 35zM300 950v100q0 21 14.5 35.5t35.5 14.5h800q21 0 35.5 -14.5t14.5 -35.5v-100q0 -20 -14.5 -35t-35.5 -15 h-800q-21 0 -35.5 15t-14.5 35z"
+        unicode="&#xe056;"/>
+      <glyph
+        d="M-101 500v100h201v75l166 -125l-166 -125v75h-201zM300 0h100v1100h-100v-1100zM500 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35 v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 650q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM500 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100 q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100z"
+        unicode="&#xe057;"/>
+      <glyph
+        d="M1 50q0 -20 14.5 -35t35.5 -15h600q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-600q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 350q0 -20 14.5 -35t35.5 -15h300q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-300q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 650 q0 -20 14.5 -35t35.5 -15h500q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-500q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM1 950q0 -20 14.5 -35t35.5 -15h100q20 0 35 15t15 35v100q0 21 -15 35.5t-35 14.5h-100q-21 0 -35.5 -14.5t-14.5 -35.5v-100zM801 0v1100h100v-1100 h-100zM934 550l167 -125v75h200v100h-200v75z"
+        unicode="&#xe058;"/>
+      <glyph
+        d="M0 275v650q0 31 22 53t53 22h750q31 0 53 -22t22 -53v-650q0 -31 -22 -53t-53 -22h-750q-31 0 -53 22t-22 53zM900 600l300 300v-600z"
+        unicode="&#xe059;"/>
+      <glyph
+        d="M0 44v1012q0 18 13 31t31 13h1112q19 0 31.5 -13t12.5 -31v-1012q0 -18 -12.5 -31t-31.5 -13h-1112q-18 0 -31 13t-13 31zM100 263l247 182l298 -131l-74 156l293 318l236 -288v500h-1000v-737zM208 750q0 56 39 95t95 39t95 -39t39 -95t-39 -95t-95 -39t-95 39t-39 95z "
+        unicode="&#xe060;"/>
+      <glyph
+        d="M148 745q0 124 60.5 231.5t165 172t226.5 64.5q123 0 227 -63t164.5 -169.5t60.5 -229.5t-73 -272q-73 -114 -166.5 -237t-150.5 -189l-57 -66q-10 9 -27 26t-66.5 70.5t-96 109t-104 135.5t-100.5 155q-63 139 -63 262zM342 772q0 -107 75.5 -182.5t181.5 -75.5 q107 0 182.5 75.5t75.5 182.5t-75.5 182t-182.5 75t-182 -75.5t-75 -181.5z"
+        unicode="&#xe062;"/>
+      <glyph
+        d="M1 600q0 122 47.5 233t127.5 191t191 127.5t233 47.5t233 -47.5t191 -127.5t127.5 -191t47.5 -233t-47.5 -233t-127.5 -191t-191 -127.5t-233 -47.5t-233 47.5t-191 127.5t-127.5 191t-47.5 233zM173 600q0 -177 125.5 -302t301.5 -125v854q-176 0 -301.5 -125 t-125.5 -302z"
+        unicode="&#xe063;"/>
+      <glyph
+        d="M117 406q0 94 34 186t88.5 172.5t112 159t115 177t87.5 194.5q21 -71 57.5 -142.5t76 -130.5t83 -118.5t82 -117t70 -116t50 -125.5t18.5 -136q0 -89 -39 -165.5t-102 -126.5t-140 -79.5t-156 -33.5q-114 6 -211.5 53t-161.5 138.5t-64 210.5zM243 414q14 -82 59.5 -136 t136.5 -80l16 98q-7 6 -18 17t-34 48t-33 77q-15 73 -14 143.5t10 122.5l9 51q-92 -110 -119.5 -185t-12.5 -156z"
+        unicode="&#xe064;"/>
+      <glyph
+        d="M0 400v300q0 165 117.5 282.5t282.5 117.5q366 -6 397 -14l-186 -186h-311q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v125l200 200v-225q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM436 341l161 50l412 412l-114 113l-405 -405zM995 1015l113 -113l113 113l-21 85l-92 28z"
+        unicode="&#xe065;"/>
+      <glyph
+        d="M0 400v300q0 165 117.5 282.5t282.5 117.5h261l2 -80q-133 -32 -218 -120h-145q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5l200 153v-53q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5 zM423 524q30 38 81.5 64t103 35.5t99 14t77.5 3.5l29 -1v-209l360 324l-359 318v-216q-7 0 -19 -1t-48 -8t-69.5 -18.5t-76.5 -37t-76.5 -59t-62 -88t-39.5 -121.5z"
+        unicode="&#xe066;"/>
+      <glyph
+        d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q60 0 127 -23l-178 -177h-349q-41 0 -70.5 -29.5t-29.5 -70.5v-500q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v69l200 200v-169q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5 t-117.5 282.5zM342 632l283 -284l566 567l-136 137l-430 -431l-147 147z"
+        unicode="&#xe067;"/>
+      <glyph
+        d="M0 603l300 296v-198h200v200h-200l300 300l295 -300h-195v-200h200v198l300 -296l-300 -300v198h-200v-200h195l-295 -300l-300 300h200v200h-200v-198z"
+        unicode="&#xe068;"/>
+      <glyph
+        d="M200 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-1100l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z"
+        unicode="&#xe069;"/>
+      <glyph
+        d="M0 50v1000q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-437l500 487v-487l500 487v-1100l-500 488v-488l-500 488v-438q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5z"
+        unicode="&#xe070;"/>
+      <glyph d="M136 550l564 550v-487l500 487v-1100l-500 488v-488z" unicode="&#xe071;"/>
+      <glyph d="M200 0l900 550l-900 550v-1100z" unicode="&#xe072;"/>
+      <glyph
+        d="M200 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200q-21 0 -35.5 -14.5t-14.5 -35.5v-800zM600 150q0 -21 14.5 -35.5t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v800q0 21 -14.5 35.5t-35.5 14.5h-200 q-21 0 -35.5 -14.5t-14.5 -35.5v-800z"
+        unicode="&#xe073;"/>
+      <glyph
+        d="M200 150q0 -20 14.5 -35t35.5 -15h800q21 0 35.5 15t14.5 35v800q0 21 -14.5 35.5t-35.5 14.5h-800q-21 0 -35.5 -14.5t-14.5 -35.5v-800z"
+        unicode="&#xe074;"/>
+      <glyph d="M0 0v1100l500 -487v487l564 -550l-564 -550v488z" unicode="&#xe075;"/>
+      <glyph
+        d="M0 0v1100l500 -487v487l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438l-500 -488v488z"
+        unicode="&#xe076;"/>
+      <glyph
+        d="M300 0v1100l500 -487v437q0 21 14.5 35.5t35.5 14.5h100q21 0 35.5 -14.5t14.5 -35.5v-1000q0 -21 -14.5 -35.5t-35.5 -14.5h-100q-21 0 -35.5 14.5t-14.5 35.5v438z"
+        unicode="&#xe077;"/>
+      <glyph
+        d="M100 250v100q0 21 14.5 35.5t35.5 14.5h1000q21 0 35.5 -14.5t14.5 -35.5v-100q0 -21 -14.5 -35.5t-35.5 -14.5h-1000q-21 0 -35.5 14.5t-14.5 35.5zM100 500h1100l-550 564z"
+        unicode="&#xe078;"/>
+      <glyph d="M185 599l592 -592l240 240l-353 353l353 353l-240 240z" unicode="&#xe079;"/>
+      <glyph d="M272 194l353 353l-353 353l241 240l572 -571l21 -22l-1 -1v-1l-592 -591z"
+        unicode="&#xe080;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM300 500h200v-200h200v200h200v200h-200v200h-200v-200h-200v-200z"
+        unicode="&#xe081;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM300 500h600v200h-600v-200z"
+        unicode="&#xe082;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM246 459l213 -213l141 142l141 -142l213 213l-142 141l142 141l-213 212l-141 -141l-141 142l-212 -213l141 -141z"
+        unicode="&#xe083;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -299.5t-217.5 -217.5t-299.5 -80t-299.5 80t-217.5 217.5t-80 299.5zM270 551l276 -277l411 411l-175 174l-236 -236l-102 102z"
+        unicode="&#xe084;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM363 700h144q4 0 11.5 -1t11 -1t6.5 3t3 9t1 11t3.5 8.5t3.5 6t5.5 4t6.5 2.5t9 1.5t9 0.5h11.5h12.5q19 0 30 -10t11 -26 q0 -22 -4 -28t-27 -22q-5 -1 -12.5 -3t-27 -13.5t-34 -27t-26.5 -46t-11 -68.5h200q5 3 14 8t31.5 25.5t39.5 45.5t31 69t14 94q0 51 -17.5 89t-42 58t-58.5 32t-58.5 15t-51.5 3q-105 0 -172 -56t-67 -183zM500 300h200v100h-200v-100z"
+        unicode="&#xe085;"/>
+      <glyph
+        d="M3 600q0 162 80 299.5t217.5 217.5t299.5 80t299.5 -80t217.5 -217.5t80 -299.5t-80 -300t-217.5 -218t-299.5 -80t-299.5 80t-217.5 218t-80 300zM400 300h400v100h-100v300h-300v-100h100v-200h-100v-100zM500 800h200v100h-200v-100z"
+        unicode="&#xe086;"/>
+      <glyph
+        d="M0 500v200h194q15 60 36 104.5t55.5 86t88 69t126.5 40.5v200h200v-200q54 -20 113 -60t112.5 -105.5t71.5 -134.5h203v-200h-203q-25 -102 -116.5 -186t-180.5 -117v-197h-200v197q-140 27 -208 102.5t-98 200.5h-194zM290 500q24 -73 79.5 -127.5t130.5 -78.5v206h200 v-206q149 48 201 206h-201v200h200q-25 74 -76 127.5t-124 76.5v-204h-200v203q-75 -24 -130 -77.5t-79 -125.5h209v-200h-210z"
+        unicode="&#xe087;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM356 465l135 135 l-135 135l109 109l135 -135l135 135l109 -109l-135 -135l135 -135l-109 -109l-135 135l-135 -135z"
+        unicode="&#xe088;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM322 537l141 141 l87 -87l204 205l142 -142l-346 -345z"
+        unicode="&#xe089;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -115 62 -215l568 567q-100 62 -216 62q-171 0 -292.5 -121.5t-121.5 -292.5zM391 245q97 -59 209 -59q171 0 292.5 121.5t121.5 292.5 q0 112 -59 209z"
+        unicode="&#xe090;"/>
+      <glyph d="M0 547l600 453v-300h600v-300h-600v-301z" unicode="&#xe091;"/>
+      <glyph d="M0 400v300h600v300l600 -453l-600 -448v301h-600z" unicode="&#xe092;"/>
+      <glyph d="M204 600l450 600l444 -600h-298v-600h-300v600h-296z" unicode="&#xe093;"/>
+      <glyph d="M104 600h296v600h300v-600h298l-449 -600z" unicode="&#xe094;"/>
+      <glyph
+        d="M0 200q6 132 41 238.5t103.5 193t184 138t271.5 59.5v271l600 -453l-600 -448v301q-95 -2 -183 -20t-170 -52t-147 -92.5t-100 -135.5z"
+        unicode="&#xe095;"/>
+      <glyph
+        d="M0 0v400l129 -129l294 294l142 -142l-294 -294l129 -129h-400zM635 777l142 -142l294 294l129 -129v400h-400l129 -129z"
+        unicode="&#xe096;"/>
+      <glyph
+        d="M34 176l295 295l-129 129h400v-400l-129 130l-295 -295zM600 600v400l129 -129l295 295l142 -141l-295 -295l129 -130h-400z"
+        unicode="&#xe097;"/>
+      <glyph
+        d="M23 600q0 118 45.5 224.5t123 184t184 123t224.5 45.5t224.5 -45.5t184 -123t123 -184t45.5 -224.5t-45.5 -224.5t-123 -184t-184 -123t-224.5 -45.5t-224.5 45.5t-184 123t-123 184t-45.5 224.5zM456 851l58 -302q4 -20 21.5 -34.5t37.5 -14.5h54q20 0 37.5 14.5 t21.5 34.5l58 302q4 20 -8 34.5t-33 14.5h-207q-20 0 -32 -14.5t-8 -34.5zM500 300h200v100h-200v-100z"
+        unicode="&#xe101;"/>
+      <glyph
+        d="M0 800h100v-200h400v300h200v-300h400v200h100v100h-111v6t-1 15t-3 18l-34 172q-11 39 -41.5 63t-69.5 24q-32 0 -61 -17l-239 -144q-22 -13 -40 -35q-19 24 -40 36l-238 144q-33 18 -62 18q-39 0 -69.5 -23t-40.5 -61l-35 -177q-2 -8 -3 -18t-1 -15v-6h-111v-100z M100 0h400v400h-400v-400zM200 900q-3 0 14 48t35 96l18 47l214 -191h-281zM700 0v400h400v-400h-400zM731 900l202 197q5 -12 12 -32.5t23 -64t25 -72t7 -28.5h-269z"
+        unicode="&#xe102;"/>
+      <glyph
+        d="M0 -22v143l216 193q-9 53 -13 83t-5.5 94t9 113t38.5 114t74 124q47 60 99.5 102.5t103 68t127.5 48t145.5 37.5t184.5 43.5t220 58.5q0 -189 -22 -343t-59 -258t-89 -181.5t-108.5 -120t-122 -68t-125.5 -30t-121.5 -1.5t-107.5 12.5t-87.5 17t-56.5 7.5l-99 -55z M238.5 300.5q19.5 -6.5 86.5 76.5q55 66 367 234q70 38 118.5 69.5t102 79t99 111.5t86.5 148q22 50 24 60t-6 19q-7 5 -17 5t-26.5 -14.5t-33.5 -39.5q-35 -51 -113.5 -108.5t-139.5 -89.5l-61 -32q-369 -197 -458 -401q-48 -111 -28.5 -117.5z"
+        unicode="&#xe103;"/>
+      <glyph
+        d="M111 408q0 -33 5 -63q9 -56 44 -119.5t105 -108.5q31 -21 64 -16t62 23.5t57 49.5t48 61.5t35 60.5q32 66 39 184.5t-13 157.5q79 -80 122 -164t26 -184q-5 -33 -20.5 -69.5t-37.5 -80.5q-10 -19 -14.5 -29t-12 -26t-9 -23.5t-3 -19t2.5 -15.5t11 -9.5t19.5 -5t30.5 2.5 t42 8q57 20 91 34t87.5 44.5t87 64t65.5 88.5t47 122q38 172 -44.5 341.5t-246.5 278.5q22 -44 43 -129q39 -159 -32 -154q-15 2 -33 9q-79 33 -120.5 100t-44 175.5t48.5 257.5q-13 -8 -34 -23.5t-72.5 -66.5t-88.5 -105.5t-60 -138t-8 -166.5q2 -12 8 -41.5t8 -43t6 -39.5 t3.5 -39.5t-1 -33.5t-6 -31.5t-13.5 -24t-21 -20.5t-31 -12q-38 -10 -67 13t-40.5 61.5t-15 81.5t10.5 75q-52 -46 -83.5 -101t-39 -107t-7.5 -85z"
+        unicode="&#xe104;"/>
+      <glyph
+        d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5t145.5 -23.5t132.5 -59t116.5 -83.5t97 -90t74.5 -85.5t49 -63.5t20 -30l26 -40l-26 -40q-6 -10 -20 -30t-49 -63.5t-74.5 -85.5t-97 -90t-116.5 -83.5t-132.5 -59t-145.5 -23.5 t-145.5 23.5t-132.5 59t-116.5 83.5t-97 90t-74.5 85.5t-49 63.5t-20 30zM120 600q7 -10 40.5 -58t56 -78.5t68 -77.5t87.5 -75t103 -49.5t125 -21.5t123.5 20t100.5 45.5t85.5 71.5t66.5 75.5t58 81.5t47 66q-1 1 -28.5 37.5t-42 55t-43.5 53t-57.5 63.5t-58.5 54 q49 -74 49 -163q0 -124 -88 -212t-212 -88t-212 88t-88 212q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l105 105q-37 24 -75 72t-57 84l-20 36z"
+        unicode="&#xe105;"/>
+      <glyph
+        d="M-61 600l26 40q6 10 20 30t49 63.5t74.5 85.5t97 90t116.5 83.5t132.5 59t145.5 23.5q61 0 121 -17l37 142h148l-314 -1200h-148l37 143q-82 21 -165 71.5t-140 102t-109.5 112t-72 88.5t-29.5 43zM120 600q210 -282 393 -336l37 141q-107 18 -178.5 101.5t-71.5 193.5 q0 85 46 158q-102 -87 -226 -258zM377 656q49 -124 154 -191l47 47l23 87q-30 28 -59 69t-44 68l-14 26zM780 161l38 145q22 15 44.5 34t46 44t40.5 44t41 50.5t33.5 43.5t33 44t24.5 34q-97 127 -140 175l39 146q67 -54 131.5 -125.5t87.5 -103.5t36 -52l26 -40l-26 -40 q-7 -12 -25.5 -38t-63.5 -79.5t-95.5 -102.5t-124 -100t-146.5 -79z"
+        unicode="&#xe106;"/>
+      <glyph
+        d="M-97.5 34q13.5 -34 50.5 -34h1294q37 0 50.5 35.5t-7.5 67.5l-642 1056q-20 33 -48 36t-48 -29l-642 -1066q-21 -32 -7.5 -66zM155 200l445 723l445 -723h-345v100h-200v-100h-345zM500 600l100 -300l100 300v100h-200v-100z"
+        unicode="&#xe107;"/>
+      <glyph
+        d="M100 262v41q0 20 11 44.5t26 38.5l363 325v339q0 62 44 106t106 44t106 -44t44 -106v-339l363 -325q15 -14 26 -38.5t11 -44.5v-41q0 -20 -12 -26.5t-29 5.5l-359 249v-263q100 -91 100 -113v-64q0 -21 -13 -29t-32 1l-94 78h-222l-94 -78q-19 -9 -32 -1t-13 29v64 q0 22 100 113v263l-359 -249q-17 -12 -29 -5.5t-12 26.5z"
+        unicode="&#xe108;"/>
+      <glyph
+        d="M0 50q0 -20 14.5 -35t35.5 -15h1000q21 0 35.5 15t14.5 35v750h-1100v-750zM0 900h1100v150q0 21 -14.5 35.5t-35.5 14.5h-150v100h-100v-100h-500v100h-100v-100h-150q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 100v100h100v-100h-100zM100 300v100h100v-100h-100z M100 500v100h100v-100h-100zM300 100v100h100v-100h-100zM300 300v100h100v-100h-100zM300 500v100h100v-100h-100zM500 100v100h100v-100h-100zM500 300v100h100v-100h-100zM500 500v100h100v-100h-100zM700 100v100h100v-100h-100zM700 300v100h100v-100h-100zM700 500 v100h100v-100h-100zM900 100v100h100v-100h-100zM900 300v100h100v-100h-100zM900 500v100h100v-100h-100z"
+        unicode="&#xe109;"/>
+      <glyph
+        d="M0 200v200h259l600 600h241v198l300 -295l-300 -300v197h-159l-600 -600h-341zM0 800h259l122 -122l141 142l-181 180h-341v-200zM678 381l141 142l122 -123h159v198l300 -295l-300 -300v197h-241z"
+        unicode="&#xe110;"/>
+      <glyph
+        d="M0 400v600q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-600q0 -41 -29.5 -70.5t-70.5 -29.5h-596l-304 -300v300h-100q-41 0 -70.5 29.5t-29.5 70.5z"
+        unicode="&#xe111;"/>
+      <glyph
+        d="M100 600v200h300v-250q0 -113 6 -145q17 -92 102 -117q39 -11 92 -11q37 0 66.5 5.5t50 15.5t36 24t24 31.5t14 37.5t7 42t2.5 45t0 47v25v250h300v-200q0 -42 -3 -83t-15 -104t-31.5 -116t-58 -109.5t-89 -96.5t-129 -65.5t-174.5 -25.5t-174.5 25.5t-129 65.5t-89 96.5 t-58 109.5t-31.5 116t-15 104t-3 83zM100 900v300h300v-300h-300zM800 900v300h300v-300h-300z"
+        unicode="&#xe112;"/>
+      <glyph d="M-30 411l227 -227l352 353l353 -353l226 227l-578 579z" unicode="&#xe113;"/>
+      <glyph d="M70 797l580 -579l578 579l-226 227l-353 -353l-352 353z" unicode="&#xe114;"/>
+      <glyph
+        d="M-198 700l299 283l300 -283h-203v-400h385l215 -200h-800v600h-196zM402 1000l215 -200h381v-400h-198l299 -283l299 283h-200v600h-796z"
+        unicode="&#xe115;"/>
+      <glyph
+        d="M18 939q-5 24 10 42q14 19 39 19h896l38 162q5 17 18.5 27.5t30.5 10.5h94q20 0 35 -14.5t15 -35.5t-15 -35.5t-35 -14.5h-54l-201 -961q-2 -4 -6 -10.5t-19 -17.5t-33 -11h-31v-50q0 -20 -14.5 -35t-35.5 -15t-35.5 15t-14.5 35v50h-300v-50q0 -20 -14.5 -35t-35.5 -15 t-35.5 15t-14.5 35v50h-50q-21 0 -35.5 15t-14.5 35q0 21 14.5 35.5t35.5 14.5h535l48 200h-633q-32 0 -54.5 21t-27.5 43z"
+        unicode="&#xe116;"/>
+      <glyph
+        d="M0 0v800h1200v-800h-1200zM0 900v100h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-100h-1200z"
+        unicode="&#xe117;"/>
+      <glyph
+        d="M1 0l300 700h1200l-300 -700h-1200zM1 400v600h200q0 41 29.5 70.5t70.5 29.5h300q41 0 70.5 -29.5t29.5 -70.5h500v-200h-1000z"
+        unicode="&#xe118;"/>
+      <glyph d="M302 300h198v600h-198l298 300l298 -300h-198v-600h198l-298 -300z"
+        unicode="&#xe119;"/>
+      <glyph d="M0 600l300 298v-198h600v198l300 -298l-300 -297v197h-600v-197z" unicode="&#xe120;"/>
+      <glyph
+        d="M0 100v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM31 400l172 739q5 22 23 41.5t38 19.5h672q19 0 37.5 -22.5t23.5 -45.5l172 -732h-1138zM800 100h100v100h-100v-100z M1000 100h100v100h-100v-100z"
+        unicode="&#xe121;"/>
+      <glyph
+        d="M-101 600v50q0 24 25 49t50 38l25 13v-250l-11 5.5t-24 14t-30 21.5t-24 27.5t-11 31.5zM99 500v250v5q0 13 0.5 18.5t2.5 13t8 10.5t15 3h200l675 250v-850l-675 200h-38l47 -276q2 -12 -3 -17.5t-11 -6t-21 -0.5h-8h-83q-20 0 -34.5 14t-18.5 35q-56 337 -56 351z M1100 200v850q0 21 14.5 35.5t35.5 14.5q20 0 35 -14.5t15 -35.5v-850q0 -20 -15 -35t-35 -15q-21 0 -35.5 15t-14.5 35z"
+        unicode="&#xe122;"/>
+      <glyph
+        d="M74 350q0 21 13.5 35.5t33.5 14.5h17l118 173l63 327q15 77 76 140t144 83l-18 32q-6 19 3 32t29 13h94q20 0 29 -10.5t3 -29.5l-18 -37q83 -19 144 -82.5t76 -140.5l63 -327l118 -173h17q20 0 33.5 -14.5t13.5 -35.5q0 -20 -13 -40t-31 -27q-22 -9 -63 -23t-167.5 -37 t-251.5 -23t-245.5 20.5t-178.5 41.5l-58 20q-18 7 -31 27.5t-13 40.5zM497 110q12 -49 40 -79.5t63 -30.5t63 30.5t39 79.5q-48 -6 -102 -6t-103 6z"
+        unicode="&#xe123;"/>
+      <glyph
+        d="M21 445l233 -45l-78 -224l224 78l45 -233l155 179l155 -179l45 233l224 -78l-78 224l234 45l-180 155l180 156l-234 44l78 225l-224 -78l-45 233l-155 -180l-155 180l-45 -233l-224 78l78 -225l-233 -44l179 -156z"
+        unicode="&#xe124;"/>
+      <glyph
+        d="M0 200h200v600h-200v-600zM300 275q0 -75 100 -75h61q123 -100 139 -100h250q46 0 83 57l238 344q29 31 29 74v100q0 44 -30.5 84.5t-69.5 40.5h-328q28 118 28 125v150q0 44 -30.5 84.5t-69.5 40.5h-50q-27 0 -51 -20t-38 -48l-96 -198l-145 -196q-20 -26 -20 -63v-400z M400 300v375l150 212l100 213h50v-175l-50 -225h450v-125l-250 -375h-214l-136 100h-100z"
+        unicode="&#xe125;"/>
+      <glyph
+        d="M0 400v600h200v-600h-200zM300 525v400q0 75 100 75h61q123 100 139 100h250q46 0 83 -57l238 -344q29 -31 29 -74v-100q0 -44 -30.5 -84.5t-69.5 -40.5h-328q28 -118 28 -125v-150q0 -44 -30.5 -84.5t-69.5 -40.5h-50q-27 0 -51 20t-38 48l-96 198l-145 196 q-20 26 -20 63zM400 525l150 -212l100 -213h50v175l-50 225h450v125l-250 375h-214l-136 -100h-100v-375z"
+        unicode="&#xe126;"/>
+      <glyph
+        d="M8 200v600h200v-600h-200zM308 275v525q0 17 14 35.5t28 28.5l14 9l362 230q14 6 25 6q17 0 29 -12l109 -112q14 -14 14 -34q0 -18 -11 -32l-85 -121h302q85 0 138.5 -38t53.5 -110t-54.5 -111t-138.5 -39h-107l-130 -339q-7 -22 -20.5 -41.5t-28.5 -19.5h-341 q-7 0 -90 81t-83 94zM408 289l100 -89h293l131 339q6 21 19.5 41t28.5 20h203q16 0 25 15t9 36q0 20 -9 34.5t-25 14.5h-457h-6.5h-7.5t-6.5 0.5t-6 1t-5 1.5t-5.5 2.5t-4 4t-4 5.5q-5 12 -5 20q0 14 10 27l147 183l-86 83l-339 -236v-503z"
+        unicode="&#xe127;"/>
+      <glyph
+        d="M-101 651q0 72 54 110t139 37h302l-85 121q-11 16 -11 32q0 21 14 34l109 113q13 12 29 12q11 0 25 -6l365 -230q7 -4 16.5 -10.5t26 -26t16.5 -36.5v-526q0 -13 -85.5 -93.5t-93.5 -80.5h-342q-15 0 -28.5 20t-19.5 41l-131 339h-106q-84 0 -139 39t-55 111zM-1 601h222 q15 0 28.5 -20.5t19.5 -40.5l131 -339h293l106 89v502l-342 237l-87 -83l145 -184q10 -11 10 -26q0 -11 -5 -20q-1 -3 -3.5 -5.5l-4 -4t-5 -2.5t-5.5 -1.5t-6.5 -1t-6.5 -0.5h-7.5h-6.5h-476v-100zM999 201v600h200v-600h-200z"
+        unicode="&#xe128;"/>
+      <glyph
+        d="M97 719l230 -363q4 -6 10.5 -15.5t26 -25t36.5 -15.5h525q13 0 94 83t81 90v342q0 15 -20 28.5t-41 19.5l-339 131v106q0 84 -39 139t-111 55t-110 -53.5t-38 -138.5v-302l-121 84q-15 12 -33.5 11.5t-32.5 -13.5l-112 -110q-22 -22 -6 -53zM172 739l83 86l183 -146 q22 -18 47 -5q3 1 5.5 3.5l4 4t2.5 5t1.5 5.5t1 6.5t0.5 6v7.5v7v456q0 22 25 31t50 -0.5t25 -30.5v-202q0 -16 20 -29.5t41 -19.5l339 -130v-294l-89 -100h-503zM400 0v200h600v-200h-600z"
+        unicode="&#xe129;"/>
+      <glyph
+        d="M1 585q-15 -31 7 -53l112 -110q13 -13 32 -13.5t34 10.5l121 85l-1 -302q0 -84 38.5 -138t110.5 -54t111 55t39 139v106l339 131q20 6 40.5 19.5t20.5 28.5v342q0 7 -81 90t-94 83h-525q-17 0 -35.5 -14t-28.5 -28l-10 -15zM76 565l237 339h503l89 -100v-294l-340 -130 q-20 -6 -40 -20t-20 -29v-202q0 -22 -25 -31t-50 0t-25 31v456v14.5t-1.5 11.5t-5 12t-9.5 7q-24 13 -46 -5l-184 -146zM305 1104v200h600v-200h-600z"
+        unicode="&#xe130;"/>
+      <glyph
+        d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q162 0 299.5 -80t217.5 -218t80 -300t-80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 500h300l-2 -194l402 294l-402 298v-197h-298v-201z"
+        unicode="&#xe131;"/>
+      <glyph
+        d="M0 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t231.5 47.5q122 0 232.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-218 -217.5t-300 -80t-299.5 80t-217.5 217.5t-80 299.5zM200 600l400 -294v194h302v201h-300v197z"
+        unicode="&#xe132;"/>
+      <glyph
+        d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600h200v-300h200v300h200l-300 400z"
+        unicode="&#xe133;"/>
+      <glyph
+        d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM300 600l300 -400l300 400h-200v300h-200v-300h-200z"
+        unicode="&#xe134;"/>
+      <glyph
+        d="M5 597q0 122 47.5 232.5t127.5 190.5t190.5 127.5t232.5 47.5q121 0 231.5 -47.5t190.5 -127.5t127.5 -190.5t47.5 -232.5q0 -162 -80 -299.5t-217.5 -217.5t-299.5 -80t-300 80t-218 217.5t-80 299.5zM254 780q-8 -34 5.5 -93t7.5 -87q0 -9 17 -44t16 -60q12 0 23 -5.5 t23 -15t20 -13.5q20 -10 108 -42q22 -8 53 -31.5t59.5 -38.5t57.5 -11q8 -18 -15 -55.5t-20 -57.5q12 -21 22.5 -34.5t28 -27t36.5 -17.5q0 -6 -3 -15.5t-3.5 -14.5t4.5 -17q101 -2 221 111q31 30 47 48t34 49t21 62q-14 9 -37.5 9.5t-35.5 7.5q-14 7 -49 15t-52 19 q-9 0 -39.5 -0.5t-46.5 -1.5t-39 -6.5t-39 -16.5q-50 -35 -66 -12q-4 2 -3.5 25.5t0.5 25.5q-6 13 -26.5 17t-24.5 7q2 22 -2 41t-16.5 28t-38.5 -20q-23 -25 -42 4q-19 28 -8 58q8 16 22 22q6 -1 26 -1.5t33.5 -4.5t19.5 -13q12 -19 32 -37.5t34 -27.5l14 -8q0 3 9.5 39.5 t5.5 57.5q-4 23 14.5 44.5t22.5 31.5q5 14 10 35t8.5 31t15.5 22.5t34 21.5q-6 18 10 37q8 0 23.5 -1.5t24.5 -1.5t20.5 4.5t20.5 15.5q-10 23 -30.5 42.5t-38 30t-49 26.5t-43.5 23q11 41 1 44q31 -13 58.5 -14.5t39.5 3.5l11 4q6 36 -17 53.5t-64 28.5t-56 23 q-19 -3 -37 0q-15 -12 -36.5 -21t-34.5 -12t-44 -8t-39 -6q-15 -3 -46 0t-45 -3q-20 -6 -51.5 -25.5t-34.5 -34.5q-3 -11 6.5 -22.5t8.5 -18.5q-3 -34 -27.5 -91t-29.5 -79zM518 915q3 12 16 30.5t16 25.5q10 -10 18.5 -10t14 6t14.5 14.5t16 12.5q0 -18 8 -42.5t16.5 -44 t9.5 -23.5q-6 1 -39 5t-53.5 10t-36.5 16z"
+        unicode="&#xe135;"/>
+      <glyph
+        d="M0 164.5q0 21.5 15 37.5l600 599q-33 101 6 201.5t135 154.5q164 92 306 -9l-259 -138l145 -232l251 126q13 -175 -151 -267q-123 -70 -253 -23l-596 -596q-15 -16 -36.5 -16t-36.5 16l-111 110q-15 15 -15 36.5z"
+        unicode="&#xe136;"/>
+      <glyph
+        d="M0 196v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 596v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000 q-41 0 -70.5 29.5t-29.5 70.5zM0 996v100q0 41 29.5 70.5t70.5 29.5h1000q41 0 70.5 -29.5t29.5 -70.5v-100q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM600 596h500v100h-500v-100zM800 196h300v100h-300v-100zM900 996h200v100h-200v-100z"
+        horiz-adv-x="1220"
+        unicode="&#xe137;"/>
+      <glyph d="M100 1100v100h1000v-100h-1000zM150 1000h900l-350 -500v-300l-200 -200v500z"
+        unicode="&#xe138;"/>
+      <glyph
+        d="M0 200v200h1200v-200q0 -41 -29.5 -70.5t-70.5 -29.5h-1000q-41 0 -70.5 29.5t-29.5 70.5zM0 500v400q0 41 29.5 70.5t70.5 29.5h300v100q0 41 29.5 70.5t70.5 29.5h200q41 0 70.5 -29.5t29.5 -70.5v-100h300q41 0 70.5 -29.5t29.5 -70.5v-400h-500v100h-200v-100h-500z M500 1000h200v100h-200v-100z"
+        unicode="&#xe139;"/>
+      <glyph
+        d="M0 0v400l129 -129l200 200l142 -142l-200 -200l129 -129h-400zM0 800l129 129l200 -200l142 142l-200 200l129 129h-400v-400zM729 329l142 142l200 -200l129 129v-400h-400l129 129zM729 871l200 200l-129 129h400v-400l-129 129l-200 -200z"
+        unicode="&#xe140;"/>
+      <glyph
+        d="M0 596q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 596q0 -172 121.5 -293t292.5 -121t292.5 121t121.5 293q0 171 -121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM291 655 q0 23 15.5 38.5t38.5 15.5t39 -16t16 -38q0 -23 -16 -39t-39 -16q-22 0 -38 16t-16 39zM400 850q0 22 16 38.5t39 16.5q22 0 38 -16t16 -39t-16 -39t-38 -16q-23 0 -39 16.5t-16 38.5zM513 609q0 32 21 56.5t52 29.5l122 126l1 1q-9 14 -9 28q0 22 16 38.5t39 16.5 q22 0 38 -16t16 -39t-16 -39t-38 -16q-16 0 -29 10l-55 -145q17 -22 17 -51q0 -36 -25.5 -61.5t-61.5 -25.5q-37 0 -62.5 25.5t-25.5 61.5zM800 655q0 22 16 38t39 16t38.5 -15.5t15.5 -38.5t-16 -39t-38 -16q-23 0 -39 16t-16 39z"
+        unicode="&#xe141;"/>
+      <glyph
+        d="M-40 375q-13 -95 35 -173q35 -57 94 -89t129 -32q63 0 119 28q33 16 65 40.5t52.5 45.5t59.5 64q40 44 57 61l394 394q35 35 47 84t-3 96q-27 87 -117 104q-20 2 -29 2q-46 0 -79.5 -17t-67.5 -51l-388 -396l-7 -7l69 -67l377 373q20 22 39 38q23 23 50 23q38 0 53 -36 q16 -39 -20 -75l-547 -547q-52 -52 -125 -52q-55 0 -100 33t-54 96q-5 35 2.5 66t31.5 63t42 50t56 54q24 21 44 41l348 348q52 52 82.5 79.5t84 54t107.5 26.5q25 0 48 -4q95 -17 154 -94.5t51 -175.5q-7 -101 -98 -192l-252 -249l-253 -256l7 -7l69 -60l517 511 q67 67 95 157t11 183q-16 87 -67 154t-130 103q-69 33 -152 33q-107 0 -197 -55q-40 -24 -111 -95l-512 -512q-68 -68 -81 -163z"
+        unicode="&#xe142;"/>
+      <glyph
+        d="M79 784q0 131 99 229.5t230 98.5q144 0 242 -129q103 129 245 129q130 0 227 -98.5t97 -229.5q0 -46 -17.5 -91t-61 -99t-77 -89.5t-104.5 -105.5q-197 -191 -293 -322l-17 -23l-16 23q-43 58 -100 122.5t-92 99.5t-101 100l-84.5 84.5t-68 74t-60 78t-33.5 70.5t-15 78z M250 784q0 -27 30.5 -70t61.5 -75.5t95 -94.5l22 -22q93 -90 190 -201q82 92 195 203l12 12q64 62 97.5 97t64.5 79t31 72q0 71 -48 119.5t-106 48.5q-73 0 -131 -83l-118 -171l-114 174q-51 80 -124 80q-59 0 -108.5 -49.5t-49.5 -118.5z"
+        unicode="&#xe143;"/>
+      <glyph
+        d="M57 353q0 -94 66 -160l141 -141q66 -66 159 -66q95 0 159 66l283 283q66 66 66 159t-66 159l-141 141q-12 12 -19 17l-105 -105l212 -212l-389 -389l-247 248l95 95l-18 18q-46 45 -75 101l-55 -55q-66 -66 -66 -159zM269 706q0 -93 66 -159l141 -141l19 -17l105 105 l-212 212l389 389l247 -247l-95 -96l18 -18q46 -46 77 -99l29 29q35 35 62.5 88t27.5 96q0 93 -66 159l-141 141q-66 66 -159 66q-95 0 -159 -66l-283 -283q-66 -64 -66 -159z"
+        unicode="&#xe144;"/>
+      <glyph
+        d="M200 100v953q0 21 30 46t81 48t129 38t163 15t162 -15t127 -38t79 -48t29 -46v-953q0 -41 -29.5 -70.5t-70.5 -29.5h-600q-41 0 -70.5 29.5t-29.5 70.5zM300 300h600v700h-600v-700zM496 150q0 -43 30.5 -73.5t73.5 -30.5t73.5 30.5t30.5 73.5t-30.5 73.5t-73.5 30.5 t-73.5 -30.5t-30.5 -73.5z"
+        unicode="&#xe145;"/>
+      <glyph
+        d="M0 0l303 380l207 208l-210 212h300l267 279l-35 36q-15 14 -15 35t15 35q14 15 35 15t35 -15l283 -282q15 -15 15 -36t-15 -35q-14 -15 -35 -15t-35 15l-36 35l-279 -267v-300l-212 210l-208 -207z"
+        unicode="&#xe146;"/>
+      <glyph
+        d="M295 433h139q5 -77 48.5 -126.5t117.5 -64.5v335l-27 7q-46 14 -79 26.5t-72 36t-62.5 52t-40 72.5t-16.5 99q0 92 44 159.5t109 101t144 40.5v78h100v-79q38 -4 72.5 -13.5t75.5 -31.5t71 -53.5t51.5 -84t24.5 -118.5h-159q-8 72 -35 109.5t-101 50.5v-307l64 -14 q34 -7 64 -16.5t70 -31.5t67.5 -52t47.5 -80.5t20 -112.5q0 -139 -89 -224t-244 -96v-77h-100v78q-152 17 -237 104q-40 40 -52.5 93.5t-15.5 139.5zM466 889q0 -29 8 -51t16.5 -34t29.5 -22.5t31 -13.5t38 -10q7 -2 11 -3v274q-61 -8 -97.5 -37.5t-36.5 -102.5zM700 237 q170 18 170 151q0 64 -44 99.5t-126 60.5v-311z"
+        unicode="&#xe148;"/>
+      <glyph
+        d="M100 600v100h166q-24 49 -44 104q-10 26 -14.5 55.5t-3 72.5t25 90t68.5 87q97 88 263 88q129 0 230 -89t101 -208h-153q0 52 -34 89.5t-74 51.5t-76 14q-37 0 -79 -14.5t-62 -35.5q-41 -44 -41 -101q0 -11 2.5 -24.5t5.5 -24t9.5 -26.5t10.5 -25t14 -27.5t14 -25.5 t15.5 -27t13.5 -24h242v-100h-197q8 -50 -2.5 -115t-31.5 -94q-41 -59 -99 -113q35 11 84 18t70 7q32 1 102 -16t104 -17q76 0 136 30l50 -147q-41 -25 -80.5 -36.5t-59 -13t-61.5 -1.5q-23 0 -128 33t-155 29q-39 -4 -82 -17t-66 -25l-24 -11l-55 145l16.5 11t15.5 10 t13.5 9.5t14.5 12t14.5 14t17.5 18.5q48 55 54 126.5t-30 142.5h-221z"
+        unicode="&#xe149;"/>
+      <glyph
+        d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM602 900l298 300l298 -300h-198v-900h-200v900h-198z"
+        unicode="&#xe150;"/>
+      <glyph
+        d="M2 300h198v900h200v-900h198l-298 -300zM700 0v200h100v-100h200v-100h-300zM700 400v100h300v-200h-99v-100h-100v100h99v100h-200zM700 700v500h300v-500h-100v100h-100v-100h-100zM801 900h100v200h-100v-200z"
+        unicode="&#xe151;"/>
+      <glyph
+        d="M2 300h198v900h200v-900h198l-298 -300zM700 0v500h300v-500h-100v100h-100v-100h-100zM700 700v200h100v-100h200v-100h-300zM700 1100v100h300v-200h-99v-100h-100v100h99v100h-200zM801 200h100v200h-100v-200z"
+        unicode="&#xe152;"/>
+      <glyph
+        d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 100v400h300v-500h-100v100h-200zM800 1100v100h200v-500h-100v400h-100zM901 200h100v200h-100v-200z"
+        unicode="&#xe153;"/>
+      <glyph
+        d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM800 400v100h200v-500h-100v400h-100zM800 800v400h300v-500h-100v100h-200zM901 900h100v200h-100v-200z"
+        unicode="&#xe154;"/>
+      <glyph
+        d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h500v-200h-500zM700 400v200h400v-200h-400zM700 700v200h300v-200h-300zM700 1000v200h200v-200h-200z"
+        unicode="&#xe155;"/>
+      <glyph
+        d="M2 300l298 -300l298 300h-198v900h-200v-900h-198zM700 100v200h200v-200h-200zM700 400v200h300v-200h-300zM700 700v200h400v-200h-400zM700 1000v200h500v-200h-500z"
+        unicode="&#xe156;"/>
+      <glyph
+        d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q162 0 281 -118.5t119 -281.5v-300q0 -165 -118.5 -282.5t-281.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500z"
+        unicode="&#xe157;"/>
+      <glyph
+        d="M0 400v300q0 163 119 281.5t281 118.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-163 0 -281.5 117.5t-118.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM400 300l333 250l-333 250v-500z"
+        unicode="&#xe158;"/>
+      <glyph
+        d="M0 400v300q0 163 117.5 281.5t282.5 118.5h300q163 0 281.5 -119t118.5 -281v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-300q-165 0 -282.5 117.5t-117.5 282.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 700l250 -333l250 333h-500z"
+        unicode="&#xe159;"/>
+      <glyph
+        d="M0 400v300q0 165 117.5 282.5t282.5 117.5h300q165 0 282.5 -117.5t117.5 -282.5v-300q0 -162 -118.5 -281t-281.5 -119h-300q-165 0 -282.5 118.5t-117.5 281.5zM200 300q0 -41 29.5 -70.5t70.5 -29.5h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5 h-500q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM300 400h500l-250 333z"
+        unicode="&#xe160;"/>
+      <glyph
+        d="M0 400v300h300v200l400 -350l-400 -350v200h-300zM500 0v200h500q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-500v200h400q165 0 282.5 -117.5t117.5 -282.5v-300q0 -165 -117.5 -282.5t-282.5 -117.5h-400z"
+        unicode="&#xe161;"/>
+      <glyph
+        d="M216 519q10 -19 32 -19h302q-155 -438 -160 -458q-5 -21 4 -32l9 -8l9 -1q13 0 26 16l538 630q15 19 6 36q-8 18 -32 16h-300q1 4 78 219.5t79 227.5q2 17 -6 27l-8 8h-9q-16 0 -25 -15q-4 -5 -98.5 -111.5t-228 -257t-209.5 -238.5q-17 -19 -7 -40z"
+        unicode="&#xe162;"/>
+      <glyph
+        d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q47 0 100 15v185h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h500v185q-14 4 -114 7.5t-193 5.5l-93 2q-165 0 -282.5 -117.5t-117.5 -282.5v-300zM600 400v300h300v200l400 -350l-400 -350v200h-300z "
+        unicode="&#xe163;"/>
+      <glyph
+        d="M0 400q0 -165 117.5 -282.5t282.5 -117.5h300q163 0 281.5 117.5t118.5 282.5v98l-78 73l-122 -123v-148q0 -41 -29.5 -70.5t-70.5 -29.5h-500q-41 0 -70.5 29.5t-29.5 70.5v500q0 41 29.5 70.5t70.5 29.5h156l118 122l-74 78h-100q-165 0 -282.5 -117.5t-117.5 -282.5 v-300zM496 709l353 342l-149 149h500v-500l-149 149l-342 -353z"
+        unicode="&#xe164;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM406 600 q0 80 57 137t137 57t137 -57t57 -137t-57 -137t-137 -57t-137 57t-57 137z"
+        unicode="&#xe165;"/>
+      <glyph
+        d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 800l445 -500l450 500h-295v400h-300v-400h-300zM900 150h100v50h-100v-50z"
+        unicode="&#xe166;"/>
+      <glyph
+        d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 700h300v-300h300v300h295l-445 500zM900 150h100v50h-100v-50z"
+        unicode="&#xe167;"/>
+      <glyph
+        d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 705l305 -305l596 596l-154 155l-442 -442l-150 151zM900 150h100v50h-100v-50z"
+        unicode="&#xe168;"/>
+      <glyph
+        d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM100 988l97 -98l212 213l-97 97zM200 401h700v699l-250 -239l-149 149l-212 -212l149 -149zM900 150h100v50h-100v-50z"
+        unicode="&#xe169;"/>
+      <glyph
+        d="M0 0v275q0 11 7 18t18 7h1048q11 0 19 -7.5t8 -17.5v-275h-1100zM200 612l212 -212l98 97l-213 212zM300 1200l239 -250l-149 -149l212 -212l149 148l248 -237v700h-699zM900 150h100v50h-100v-50z"
+        unicode="&#xe170;"/>
+      <glyph d="M23 415l1177 784v-1079l-475 272l-310 -393v416h-392zM494 210l672 938l-672 -712v-226z"
+        unicode="&#xe171;"/>
+      <glyph
+        d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-850q0 -21 -15 -35.5t-35 -14.5h-150v400h-700v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200z"
+        unicode="&#xe172;"/>
+      <glyph
+        d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-218l-276 -275l-120 120l-126 -127h-378v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM581 306l123 123l120 -120l353 352l123 -123l-475 -476zM600 1000h100v200h-100v-200z"
+        unicode="&#xe173;"/>
+      <glyph
+        d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-269l-103 -103l-170 170l-298 -298h-329v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 1000h100v200h-100v-200zM700 133l170 170l-170 170l127 127l170 -170l170 170l127 -128l-170 -169l170 -170 l-127 -127l-170 170l-170 -170z"
+        unicode="&#xe174;"/>
+      <glyph
+        d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-300h-400v-200h-500v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300l300 -300l300 300h-200v300h-200v-300h-200zM600 1000v200h100v-200h-100z"
+        unicode="&#xe175;"/>
+      <glyph
+        d="M0 150v1000q0 20 14.5 35t35.5 15h250v-300h500v300h100l200 -200v-402l-200 200l-298 -298h-402v-400h-150q-21 0 -35.5 14.5t-14.5 35.5zM600 300h200v-300h200v300h200l-300 300zM600 1000v200h100v-200h-100z"
+        unicode="&#xe176;"/>
+      <glyph
+        d="M0 250q0 -21 14.5 -35.5t35.5 -14.5h1100q21 0 35.5 14.5t14.5 35.5v550h-1200v-550zM0 900h1200v150q0 21 -14.5 35.5t-35.5 14.5h-1100q-21 0 -35.5 -14.5t-14.5 -35.5v-150zM100 300v200h400v-200h-400z"
+        unicode="&#xe177;"/>
+      <glyph
+        d="M0 400l300 298v-198h400v-200h-400v-198zM100 800v200h100v-200h-100zM300 800v200h100v-200h-100zM500 800v200h400v198l300 -298l-300 -298v198h-400zM800 300v200h100v-200h-100zM1000 300h100v200h-100v-200z"
+        unicode="&#xe178;"/>
+      <glyph
+        d="M100 700v400l50 100l50 -100v-300h100v300l50 100l50 -100v-300h100v300l50 100l50 -100v-400l-100 -203v-447q0 -21 -14.5 -35.5t-35.5 -14.5h-200q-21 0 -35.5 14.5t-14.5 35.5v447zM800 597q0 -29 10.5 -55.5t25 -43t29 -28.5t25.5 -18l10 -5v-397q0 -21 14.5 -35.5 t35.5 -14.5h200q21 0 35.5 14.5t14.5 35.5v1106q0 31 -18 40.5t-44 -7.5l-276 -117q-25 -16 -43.5 -50.5t-18.5 -65.5v-359z"
+        unicode="&#xe179;"/>
+      <glyph
+        d="M100 0h400v56q-75 0 -87.5 6t-12.5 44v394h500v-394q0 -38 -12.5 -44t-87.5 -6v-56h400v56q-4 0 -11 0.5t-24 3t-30 7t-24 15t-11 24.5v888q0 22 25 34.5t50 13.5l25 2v56h-400v-56q75 0 87.5 -6t12.5 -44v-394h-500v394q0 38 12.5 44t87.5 6v56h-400v-56q4 0 11 -0.5 t24 -3t30 -7t24 -15t11 -24.5v-888q0 -22 -25 -34.5t-50 -13.5l-25 -2v-56z"
+        unicode="&#xe180;"/>
+      <glyph
+        d="M0 300q0 -41 29.5 -70.5t70.5 -29.5h300q41 0 70.5 29.5t29.5 70.5v500q0 41 -29.5 70.5t-70.5 29.5h-300q-41 0 -70.5 -29.5t-29.5 -70.5v-500zM100 100h400l200 200h105l295 98v-298h-425l-100 -100h-375zM100 300v200h300v-200h-300zM100 600v200h300v-200h-300z M100 1000h400l200 -200v-98l295 98h105v200h-425l-100 100h-375zM700 402v163l400 133v-163z"
+        unicode="&#xe181;"/>
+      <glyph
+        d="M16.5 974.5q0.5 -21.5 16 -90t46.5 -140t104 -177.5t175 -208q103 -103 207.5 -176t180 -103.5t137 -47t92.5 -16.5l31 1l163 162q16 17 13 40.5t-22 37.5l-192 136q-19 14 -45 12t-42 -19l-119 -118q-143 103 -267 227q-126 126 -227 268l118 118q17 17 20 41.5 t-11 44.5l-139 194q-14 19 -36.5 22t-40.5 -14l-162 -162q-1 -11 -0.5 -32.5z"
+        unicode="&#xe182;"/>
+      <glyph
+        d="M0 50v212q0 20 10.5 45.5t24.5 39.5l365 303v50q0 4 1 10.5t12 22.5t30 28.5t60 23t97 10.5t97 -10t60 -23.5t30 -27.5t12 -24l1 -10v-50l365 -303q14 -14 24.5 -39.5t10.5 -45.5v-212q0 -21 -15 -35.5t-35 -14.5h-1100q-21 0 -35.5 14.5t-14.5 35.5zM0 712 q0 -21 14.5 -33.5t34.5 -8.5l202 33q20 4 34.5 21t14.5 38v146q141 24 300 24t300 -24v-146q0 -21 14.5 -38t34.5 -21l202 -33q20 -4 34.5 8.5t14.5 33.5v200q-6 8 -19 20.5t-63 45t-112 57t-171 45t-235 20.5q-92 0 -175 -10.5t-141.5 -27t-108.5 -36.5t-81.5 -40 t-53.5 -36.5t-31 -27.5l-9 -10v-200z"
+        unicode="&#xe183;"/>
+      <glyph
+        d="M100 0v100h1100v-100h-1100zM175 200h950l-125 150v250l100 100v400h-100v-200h-100v200h-200v-200h-100v200h-200v-200h-100v200h-100v-400l100 -100v-250z"
+        unicode="&#xe184;"/>
+      <glyph
+        d="M100 0h300v400q0 41 -29.5 70.5t-70.5 29.5h-100q-41 0 -70.5 -29.5t-29.5 -70.5v-400zM500 0v1000q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-1000h-300zM900 0v700q0 41 29.5 70.5t70.5 29.5h100q41 0 70.5 -29.5t29.5 -70.5v-700h-300z"
+        unicode="&#xe185;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z"
+        unicode="&#xe186;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h100v200h100v-200h100v500h-100v-200h-100v200h-100v-500zM600 300h200v100h100v300h-100v100h-200v-500 zM700 400v300h100v-300h-100z"
+        unicode="&#xe187;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v100h-200v300h200v100h-300v-500zM600 300h300v100h-200v300h200v100h-300v-500z"
+        unicode="&#xe188;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 550l300 -150v300zM600 400l300 150l-300 150v-300z"
+        unicode="&#xe189;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300v500h700v-500h-700zM300 400h130q41 0 68 42t27 107t-28.5 108t-66.5 43h-130v-300zM575 549 q0 -65 27 -107t68 -42h130v300h-130q-38 0 -66.5 -43t-28.5 -108z"
+        unicode="&#xe190;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v300h-200v100h200v100h-300v-300h200v-100h-200v-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z"
+        unicode="&#xe191;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 300h300v400h-200v100h-100v-500zM301 400v200h100v-200h-100zM601 300h100v100h-100v-100zM700 700h100 v-400h100v500h-200v-100z"
+        unicode="&#xe192;"/>
+      <glyph
+        d="M-100 300v500q0 124 88 212t212 88h700q124 0 212 -88t88 -212v-500q0 -124 -88 -212t-212 -88h-700q-124 0 -212 88t-88 212zM100 200h900v700h-900v-700zM200 700v100h300v-300h-99v-100h-100v100h99v200h-200zM201 300v100h100v-100h-100zM601 300v100h100v-100h-100z M700 700v100h200v-500h-100v400h-100z"
+        unicode="&#xe193;"/>
+      <glyph
+        d="M4 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM186 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 500v200 l100 100h300v-100h-300v-200h300v-100h-300z"
+        unicode="&#xe194;"/>
+      <glyph
+        d="M0 600q0 162 80 299t217 217t299 80t299 -80t217 -217t80 -299t-80 -299t-217 -217t-299 -80t-299 80t-217 217t-80 299zM182 600q0 -171 121.5 -292.5t292.5 -121.5t292.5 121.5t121.5 292.5t-121.5 292.5t-292.5 121.5t-292.5 -121.5t-121.5 -292.5zM400 400v400h300 l100 -100v-100h-100v100h-200v-100h200v-100h-200v-100h-100zM700 400v100h100v-100h-100z"
+        unicode="&#xe195;"/>
+      <glyph
+        d="M-14 494q0 -80 56.5 -137t135.5 -57h222v300h400v-300h128q120 0 205 86t85 208q0 120 -85 206.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200h200v300h200v-300 h200l-300 -300z"
+        unicode="&#xe197;"/>
+      <glyph
+        d="M-14 494q0 -80 56.5 -137t135.5 -57h8l414 414l403 -403q94 26 154.5 104t60.5 178q0 121 -85 207.5t-205 86.5q-46 0 -90 -14q-44 97 -134.5 156.5t-200.5 59.5q-152 0 -260 -107.5t-108 -260.5q0 -25 2 -37q-66 -14 -108.5 -67.5t-42.5 -122.5zM300 200l300 300 l300 -300h-200v-300h-200v300h-200z"
+        unicode="&#xe198;"/>
+      <glyph
+        d="M100 200h400v-155l-75 -45h350l-75 45v155h400l-270 300h170l-270 300h170l-300 333l-300 -333h170l-270 -300h170z"
+        unicode="&#xe199;"/>
+      <glyph
+        d="M121 700q0 -53 28.5 -97t75.5 -65q-4 -16 -4 -38q0 -74 52.5 -126.5t126.5 -52.5q56 0 100 30v-306l-75 -45h350l-75 45v306q46 -30 100 -30q74 0 126.5 52.5t52.5 126.5q0 24 -9 55q50 32 79.5 83t29.5 112q0 90 -61.5 155.5t-150.5 71.5q-26 89 -99.5 145.5 t-167.5 56.5q-116 0 -197.5 -81.5t-81.5 -197.5q0 -4 1 -12t1 -11q-14 2 -23 2q-74 0 -126.5 -52.5t-52.5 -126.5z"
+        unicode="&#xe200;"/>
+    </font>
+  </defs>
+</svg>
diff --git a/azkaban-web-server/src/web/js/jquery/jquery-1.9.1.js b/azkaban-web-server/src/web/js/jquery/jquery-1.9.1.js
index e2c203f..8e4906d 100644
--- a/azkaban-web-server/src/web/js/jquery/jquery-1.9.1.js
+++ b/azkaban-web-server/src/web/js/jquery/jquery-1.9.1.js
@@ -8175,89 +8175,94 @@ function ajaxHandleResponses( s, jqXHR, responses ) {
 }
 
 // Chain conversions given the request and the original response
-function ajaxConvert( s, response ) {
-	var conv2, current, conv, tmp,
-		converters = {},
-		i = 0,
-		// Work with a copy of dataTypes in case we need to modify it for conversion
-		dataTypes = s.dataTypes.slice(),
-		prev = dataTypes[ 0 ];
-
-	// Apply the dataFilter if provided
-	if ( s.dataFilter ) {
-		response = s.dataFilter( response, s.dataType );
-	}
-
-	// Create converters map with lowercased keys
-	if ( dataTypes[ 1 ] ) {
-		for ( conv in s.converters ) {
-			converters[ conv.toLowerCase() ] = s.converters[ conv ];
-		}
-	}
-
-	// Convert to each sequential dataType, tolerating list modification
-	for ( ; (current = dataTypes[++i]); ) {
-
-		// There's only work to do if current dataType is non-auto
-		if ( current !== "*" ) {
-
-			// Convert response if prev dataType is non-auto and differs from current
-			if ( prev !== "*" && prev !== current ) {
-
-				// Seek a direct converter
-				conv = converters[ prev + " " + current ] || converters[ "* " + current ];
-
-				// If none found, seek a pair
-				if ( !conv ) {
-					for ( conv2 in converters ) {
-
-						// If conv2 outputs current
-						tmp = conv2.split(" ");
-						if ( tmp[ 1 ] === current ) {
-
-							// If prev can be converted to accepted input
-							conv = converters[ prev + " " + tmp[ 0 ] ] ||
-								converters[ "* " + tmp[ 0 ] ];
-							if ( conv ) {
-								// Condense equivalence converters
-								if ( conv === true ) {
-									conv = converters[ conv2 ];
-
-								// Otherwise, insert the intermediate dataType
-								} else if ( converters[ conv2 ] !== true ) {
-									current = tmp[ 0 ];
-									dataTypes.splice( i--, 0, current );
-								}
-
-								break;
-							}
-						}
-					}
-				}
-
-				// Apply converter (if not an equivalence)
-				if ( conv !== true ) {
-
-					// Unless errors are allowed to bubble, catch and return them
-					if ( conv && s["throws"] ) {
-						response = conv( response );
-					} else {
-						try {
-							response = conv( response );
-						} catch ( e ) {
-							return { state: "parsererror", error: conv ? e : "No conversion from " + prev + " to " + current };
-						}
-					}
-				}
-			}
+  function ajaxConvert(s, response) {
+    var conv2, current, conv, tmp,
+        converters = {},
+        i = 0,
+        // Work with a copy of dataTypes in case we need to modify it for conversion
+        dataTypes = s.dataTypes.slice(),
+        prev = dataTypes[0];
+
+    // Apply the dataFilter if provided
+    if (s.dataFilter) {
+      response = s.dataFilter(response, s.dataType);
+    }
+
+    // Create converters map with lowercased keys
+    if (dataTypes[1]) {
+      for (conv in s.converters) {
+        converters[conv.toLowerCase()] = s.converters[conv];
+      }
+    }
+
+    // Convert to each sequential dataType, tolerating list modification
+    for (; (current = dataTypes[++i]);) {
+
+      // There's only work to do if current dataType is non-auto
+      if (current !== "*") {
+
+        // Convert response if prev dataType is non-auto and differs from current
+        if (prev !== "*" && prev !== current) {
+
+          // Seek a direct converter
+          conv = converters[prev + " " + current] || converters["* " + current];
+
+          // If none found, seek a pair
+          if (!conv) {
+            for (conv2 in converters) {
+
+              // If conv2 outputs current
+              tmp = conv2.split(" ");
+              if (tmp[1] === current) {
+
+                // If prev can be converted to accepted input
+                conv = converters[prev + " " + tmp[0]] ||
+                    converters["* " + tmp[0]];
+                if (conv) {
+                  // Condense equivalence converters
+                  if (conv === true) {
+                    conv = converters[conv2];
+
+                    // Otherwise, insert the intermediate dataType
+                  } else if (converters[conv2] !== true) {
+                    current = tmp[0];
+                    dataTypes.splice(i--, 0, current);
+                  }
+
+                  break;
+                }
+              }
+            }
+          }
+
+          // Apply converter (if not an equivalence)
+          if (conv !== true) {
+
+            // Unless errors are allowed to bubble, catch and return them
+            if (conv && s["throws"]) {
+              response = conv(response);
+            } else {
+              try {
+                response = conv(response);
+              } catch (e) {
+                return {
+                  state: "parsererror",
+                  error: conv ? e : "No conversion from " + prev + " to "
+                      + current
+                };
+              }
+            }
+          }
+        }
+
+        // Update prev for next iteration
+        prev = current;
+      }
+    }
+
+    return {state: "success", data: response};
+  }
 
-			// Update prev for next iteration
-			prev = current;
-		}
-	}
-
-	return { state: "success", data: response };
-}
 // Install script dataType
 jQuery.ajaxSetup({
 	accepts: {

diff --git a/build.gradle b/build.gradle
index 69ec4a8..840f9d4 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,105 +1,105 @@
 buildscript {
-  repositories {
-    mavenCentral()
-    maven {
-      url 'https://plugins.gradle.org/m2/'
+    repositories {
+        mavenCentral()
+        maven {
+            url 'https://plugins.gradle.org/m2/'
+        }
+    }
+    dependencies {
+        classpath 'com.cinnober.gradle:semver-git:2.2.3'
+        classpath 'net.ltgt.gradle:gradle-errorprone-plugin:0.0.10'
     }
-  }
-  dependencies {
-    classpath 'com.cinnober.gradle:semver-git:2.2.3'
-    classpath 'net.ltgt.gradle:gradle-errorprone-plugin:0.0.10'
-  }
 }
 
 apply plugin: 'com.cinnober.gradle.semver-git'
 apply plugin: 'idea'
 
 allprojects {
-  repositories {
-    mavenCentral()
-    mavenLocal()
-  }
+    repositories {
+        mavenCentral()
+        mavenLocal()
+    }
 }
 
 subprojects {
-  apply plugin: 'java'
-  apply plugin: 'idea'
-  apply plugin: 'eclipse'
-  apply plugin: 'net.ltgt.errorprone'
+    apply plugin: 'java'
+    apply plugin: 'idea'
+    apply plugin: 'eclipse'
+    apply plugin: 'net.ltgt.errorprone'
 
-  // Set the same version for all sub-projects to root project version
-  version = rootProject.version
+    // Set the same version for all sub-projects to root project version
+    version = rootProject.version
 
-  plugins.withType(JavaPlugin) {
-    sourceCompatibility = JavaVersion.VERSION_1_8
+    plugins.withType(JavaPlugin) {
+        sourceCompatibility = JavaVersion.VERSION_1_8
 
-    /*
-     TODO remove afterEvaluate block
-     After Evaluate block was added to do a lazy evaluation. This piece of code gets executed by gradle in the
-     configuration phase. However, for some reason the version field was not updated by the LinkedIn build
-     infrastructure. Thus, using afterEvaluate to do a lazy evaluation of this code block.
-     More specifically afterEvaluate kicks in after the rest of the project is configured
+        /*
+         TODO remove afterEvaluate block
+         After Evaluate block was added to do a lazy evaluation. This piece of code gets executed by gradle in the
+         configuration phase. However, for some reason the version field was not updated by the LinkedIn build
+         infrastructure. Thus, using afterEvaluate to do a lazy evaluation of this code block.
+         More specifically afterEvaluate kicks in after the rest of the project is configured
 
-     See: http://stackoverflow.com/questions/16218888/can-gradle-extensions-handle-lazy-evaluation-of-a-property
-     See: http://stackoverflow.com/questions/16070567/difference-between-gradles-terms-evaluation-and-execution
-     */
-    project.afterEvaluate {
-      // Set the Title and Version fields in the jar
-      jar {
-        manifest {
-          attributes(
-              'Implementation-Title': project.name,
-              'Implementation-Version': project.version)
+         See: http://stackoverflow.com/questions/16218888/can-gradle-extensions-handle-lazy-evaluation-of-a-property
+         See: http://stackoverflow.com/questions/16070567/difference-between-gradles-terms-evaluation-and-execution
+         */
+        project.afterEvaluate {
+            // Set the Title and Version fields in the jar
+            jar {
+                manifest {
+                    attributes(
+                            'Implementation-Title': project.name,
+                            'Implementation-Version': project.version)
+                }
+            }
         }
-      }
-    }
 
-    dependencies {
-      testCompile('junit:junit:4.12')
+        dependencies {
+            testCompile('junit:junit:4.12')
+        }
     }
-  }
 
-  // Common distribution plugin settings for sub-modules
-  plugins.withType(DistributionPlugin) {
-    distTar {
-      compression = Compression.GZIP
-      extension = 'tar.gz'
+    // Common distribution plugin settings for sub-modules
+    plugins.withType(DistributionPlugin) {
+        distTar {
+            compression = Compression.GZIP
+            extension = 'tar.gz'
+        }
     }
-  }
 
-  /**
-   * Print test execution summary when informational logging is enabled.
-   */
-  test {
-    testLogging {
-      exceptionFormat = 'full'
-      afterSuite { desc, result ->
-        if (desc.getParent()) {
-          logger.info desc.getName()
-        } else {
-          logger.info "Overall"
+    /**
+     * Print test execution summary when informational logging is enabled.
+     */
+    test {
+        testLogging {
+            exceptionFormat = 'full'
+            afterSuite { desc, result ->
+                if (desc.getParent()) {
+                    logger.info desc.getName()
+                } else {
+                    logger.info "Overall"
+                }
+                logger.info "  ${result.resultType} (" +
+                        "${result.testCount} tests, " +
+                        "${result.successfulTestCount} passed, " +
+                        "${result.failedTestCount} failed, " +
+                        "${result.skippedTestCount} skipped)"
+            }
         }
-        logger.info "  ${result.resultType} (" +
-              "${result.testCount} tests, " +
-              "${result.successfulTestCount} passed, " +
-              "${result.failedTestCount} failed, " +
-              "${result.skippedTestCount} skipped)"
-      }
     }
-  }
 }
 
 /**
  * Gradle wrapper task.
  */
 task wrapper(type: Wrapper) {
-  gradleVersion = '3.5'
-  distributionType('ALL')
+    gradleVersion = '3.5'
+    distributionType('ALL')
 }
 
 idea {
-  project {
-    languageLevel = '1.8'
-    vcs = 'Git'
-  }
+    project {
+        languageLevel = '1.8'
+        vcs = 'Git'
+    }
 }
diff --git a/gradle.properties b/gradle.properties
index bd5c316..fae1156 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -1,27 +1,20 @@
 group=com.linkedin
-
 # optionally: ext.nextVersion = "major", "minor" (default), "patch" or e.g. "3.0.0-rc2"
 # optionally: ext.snapshotSuffix = "SNAPSHOT" (default) or a pattern, e.g. "<count>.g<sha>-SNAPSHOT"
 #
 # Eg: artifacts will be created in the format azkaban-web-server-2.7.0-474-gbc0be80.tar.gz
 snapshotSuffix=<count>-g<sha>
-
 #long-running Gradle process speeds up local builds
 #to stop the daemon run 'ligradle --stop'
 org.gradle.daemon=true
-
 #configures only relevant projects to speed up the configuration of large projects
 #useful when specific project/task is invoked e.g: ligradle :cloud:cloud-api:build
 org.gradle.configureondemand=true
-
 #Gradle will run tasks from subprojects in parallel
 #Higher CPU usage, faster builds
 org.gradle.parallel=true
-
 #Allows generation of idea/eclipse metadata for a specific subproject and its upstream project dependencies
 ide.recursive=true
-
-
 #---------------------------------
 # Versions
 #---------------------------------
diff --git a/intellij-java-google-style.xml b/intellij-java-google-style.xml
index 89db9c9..229f277 100644
--- a/intellij-java-google-style.xml
+++ b/intellij-java-google-style.xml
@@ -2,220 +2,220 @@
 <code_scheme name="GoogleStyle">
   <option name="OTHER_INDENT_OPTIONS">
     <value>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
-      <option name="USE_TAB_CHARACTER" value="false" />
-      <option name="SMART_TABS" value="false" />
-      <option name="LABEL_INDENT_SIZE" value="0" />
-      <option name="LABEL_INDENT_ABSOLUTE" value="false" />
-      <option name="USE_RELATIVE_INDENTS" value="false" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
+      <option name="USE_TAB_CHARACTER" value="false"/>
+      <option name="SMART_TABS" value="false"/>
+      <option name="LABEL_INDENT_SIZE" value="0"/>
+      <option name="LABEL_INDENT_ABSOLUTE" value="false"/>
+      <option name="USE_RELATIVE_INDENTS" value="false"/>
     </value>
   </option>
-  <option name="INSERT_INNER_CLASS_IMPORTS" value="true" />
-  <option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
-  <option name="NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999" />
+  <option name="INSERT_INNER_CLASS_IMPORTS" value="true"/>
+  <option name="CLASS_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999"/>
+  <option name="NAMES_COUNT_TO_USE_IMPORT_ON_DEMAND" value="999"/>
   <option name="PACKAGES_TO_USE_IMPORT_ON_DEMAND">
-    <value />
+    <value/>
   </option>
   <option name="IMPORT_LAYOUT_TABLE">
     <value>
-      <package name="" withSubpackages="true" static="true" />
-      <emptyLine />
-      <package name="" withSubpackages="true" static="false" />
+      <package name="" static="true" withSubpackages="true"/>
+      <emptyLine/>
+      <package name="" static="false" withSubpackages="true"/>
     </value>
   </option>
-  <option name="RIGHT_MARGIN" value="100" />
-  <option name="JD_ALIGN_PARAM_COMMENTS" value="false" />
-  <option name="JD_ALIGN_EXCEPTION_COMMENTS" value="false" />
-  <option name="JD_P_AT_EMPTY_LINES" value="false" />
-  <option name="JD_KEEP_EMPTY_PARAMETER" value="false" />
-  <option name="JD_KEEP_EMPTY_EXCEPTION" value="false" />
-  <option name="JD_KEEP_EMPTY_RETURN" value="false" />
-  <option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
-  <option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
-  <option name="BLANK_LINES_AFTER_CLASS_HEADER" value="1" />
-  <option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
-  <option name="ALIGN_MULTILINE_FOR" value="false" />
-  <option name="CALL_PARAMETERS_WRAP" value="1" />
-  <option name="METHOD_PARAMETERS_WRAP" value="1" />
-  <option name="EXTENDS_LIST_WRAP" value="1" />
-  <option name="THROWS_KEYWORD_WRAP" value="1" />
-  <option name="METHOD_CALL_CHAIN_WRAP" value="1" />
-  <option name="BINARY_OPERATION_WRAP" value="1" />
-  <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
-  <option name="TERNARY_OPERATION_WRAP" value="1" />
-  <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
-  <option name="FOR_STATEMENT_WRAP" value="1" />
-  <option name="ARRAY_INITIALIZER_WRAP" value="1" />
-  <option name="WRAP_COMMENTS" value="true" />
-  <option name="IF_BRACE_FORCE" value="3" />
-  <option name="DOWHILE_BRACE_FORCE" value="3" />
-  <option name="WHILE_BRACE_FORCE" value="3" />
-  <option name="FOR_BRACE_FORCE" value="3" />
+  <option name="RIGHT_MARGIN" value="100"/>
+  <option name="JD_ALIGN_PARAM_COMMENTS" value="false"/>
+  <option name="JD_ALIGN_EXCEPTION_COMMENTS" value="false"/>
+  <option name="JD_P_AT_EMPTY_LINES" value="false"/>
+  <option name="JD_KEEP_EMPTY_PARAMETER" value="false"/>
+  <option name="JD_KEEP_EMPTY_EXCEPTION" value="false"/>
+  <option name="JD_KEEP_EMPTY_RETURN" value="false"/>
+  <option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false"/>
+  <option name="KEEP_BLANK_LINES_IN_CODE" value="1"/>
+  <option name="BLANK_LINES_AFTER_CLASS_HEADER" value="1"/>
+  <option name="ALIGN_MULTILINE_PARAMETERS" value="false"/>
+  <option name="ALIGN_MULTILINE_FOR" value="false"/>
+  <option name="CALL_PARAMETERS_WRAP" value="1"/>
+  <option name="METHOD_PARAMETERS_WRAP" value="1"/>
+  <option name="EXTENDS_LIST_WRAP" value="1"/>
+  <option name="THROWS_KEYWORD_WRAP" value="1"/>
+  <option name="METHOD_CALL_CHAIN_WRAP" value="1"/>
+  <option name="BINARY_OPERATION_WRAP" value="1"/>
+  <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true"/>
+  <option name="TERNARY_OPERATION_WRAP" value="1"/>
+  <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true"/>
+  <option name="FOR_STATEMENT_WRAP" value="1"/>
+  <option name="ARRAY_INITIALIZER_WRAP" value="1"/>
+  <option name="WRAP_COMMENTS" value="true"/>
+  <option name="IF_BRACE_FORCE" value="3"/>
+  <option name="DOWHILE_BRACE_FORCE" value="3"/>
+  <option name="WHILE_BRACE_FORCE" value="3"/>
+  <option name="FOR_BRACE_FORCE" value="3"/>
   <AndroidXmlCodeStyleSettings>
-    <option name="USE_CUSTOM_SETTINGS" value="true" />
+    <option name="USE_CUSTOM_SETTINGS" value="true"/>
     <option name="LAYOUT_SETTINGS">
       <value>
-        <option name="INSERT_BLANK_LINE_BEFORE_TAG" value="false" />
+        <option name="INSERT_BLANK_LINE_BEFORE_TAG" value="false"/>
       </value>
     </option>
   </AndroidXmlCodeStyleSettings>
   <JSCodeStyleSettings>
-    <option name="INDENT_CHAINED_CALLS" value="false" />
+    <option name="INDENT_CHAINED_CALLS" value="false"/>
   </JSCodeStyleSettings>
   <Python>
-    <option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true" />
+    <option name="USE_CONTINUATION_INDENT_FOR_ARGUMENTS" value="true"/>
   </Python>
   <TypeScriptCodeStyleSettings>
-    <option name="INDENT_CHAINED_CALLS" value="false" />
+    <option name="INDENT_CHAINED_CALLS" value="false"/>
   </TypeScriptCodeStyleSettings>
   <XML>
-    <option name="XML_ALIGN_ATTRIBUTES" value="false" />
-    <option name="XML_LEGACY_SETTINGS_IMPORTED" value="true" />
+    <option name="XML_ALIGN_ATTRIBUTES" value="false"/>
+    <option name="XML_LEGACY_SETTINGS_IMPORTED" value="true"/>
   </XML>
   <codeStyleSettings language="CSS">
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="ECMA Script Level 4">
-    <option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
-    <option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
-    <option name="ALIGN_MULTILINE_FOR" value="false" />
-    <option name="CALL_PARAMETERS_WRAP" value="1" />
-    <option name="METHOD_PARAMETERS_WRAP" value="1" />
-    <option name="EXTENDS_LIST_WRAP" value="1" />
-    <option name="BINARY_OPERATION_WRAP" value="1" />
-    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
-    <option name="TERNARY_OPERATION_WRAP" value="1" />
-    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
-    <option name="FOR_STATEMENT_WRAP" value="1" />
-    <option name="ARRAY_INITIALIZER_WRAP" value="1" />
-    <option name="IF_BRACE_FORCE" value="3" />
-    <option name="DOWHILE_BRACE_FORCE" value="3" />
-    <option name="WHILE_BRACE_FORCE" value="3" />
-    <option name="FOR_BRACE_FORCE" value="3" />
-    <option name="PARENT_SETTINGS_INSTALLED" value="true" />
+    <option name="KEEP_BLANK_LINES_IN_CODE" value="1"/>
+    <option name="ALIGN_MULTILINE_PARAMETERS" value="false"/>
+    <option name="ALIGN_MULTILINE_FOR" value="false"/>
+    <option name="CALL_PARAMETERS_WRAP" value="1"/>
+    <option name="METHOD_PARAMETERS_WRAP" value="1"/>
+    <option name="EXTENDS_LIST_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true"/>
+    <option name="TERNARY_OPERATION_WRAP" value="1"/>
+    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true"/>
+    <option name="FOR_STATEMENT_WRAP" value="1"/>
+    <option name="ARRAY_INITIALIZER_WRAP" value="1"/>
+    <option name="IF_BRACE_FORCE" value="3"/>
+    <option name="DOWHILE_BRACE_FORCE" value="3"/>
+    <option name="WHILE_BRACE_FORCE" value="3"/>
+    <option name="FOR_BRACE_FORCE" value="3"/>
+    <option name="PARENT_SETTINGS_INSTALLED" value="true"/>
   </codeStyleSettings>
   <codeStyleSettings language="HTML">
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="JAVA">
-    <option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false" />
-    <option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
-    <option name="BLANK_LINES_AFTER_CLASS_HEADER" value="1" />
-    <option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
-    <option name="ALIGN_MULTILINE_RESOURCES" value="false" />
-    <option name="ALIGN_MULTILINE_FOR" value="false" />
-    <option name="CALL_PARAMETERS_WRAP" value="1" />
-    <option name="METHOD_PARAMETERS_WRAP" value="1" />
-    <option name="EXTENDS_LIST_WRAP" value="1" />
-    <option name="THROWS_KEYWORD_WRAP" value="1" />
-    <option name="METHOD_CALL_CHAIN_WRAP" value="1" />
-    <option name="BINARY_OPERATION_WRAP" value="1" />
-    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
-    <option name="TERNARY_OPERATION_WRAP" value="1" />
-    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
-    <option name="FOR_STATEMENT_WRAP" value="1" />
-    <option name="ARRAY_INITIALIZER_WRAP" value="1" />
-    <option name="WRAP_COMMENTS" value="true" />
-    <option name="IF_BRACE_FORCE" value="3" />
-    <option name="DOWHILE_BRACE_FORCE" value="3" />
-    <option name="WHILE_BRACE_FORCE" value="3" />
-    <option name="FOR_BRACE_FORCE" value="3" />
-    <option name="PARENT_SETTINGS_INSTALLED" value="true" />
+    <option name="KEEP_CONTROL_STATEMENT_IN_ONE_LINE" value="false"/>
+    <option name="KEEP_BLANK_LINES_IN_CODE" value="1"/>
+    <option name="BLANK_LINES_AFTER_CLASS_HEADER" value="1"/>
+    <option name="ALIGN_MULTILINE_PARAMETERS" value="false"/>
+    <option name="ALIGN_MULTILINE_RESOURCES" value="false"/>
+    <option name="ALIGN_MULTILINE_FOR" value="false"/>
+    <option name="CALL_PARAMETERS_WRAP" value="1"/>
+    <option name="METHOD_PARAMETERS_WRAP" value="1"/>
+    <option name="EXTENDS_LIST_WRAP" value="1"/>
+    <option name="THROWS_KEYWORD_WRAP" value="1"/>
+    <option name="METHOD_CALL_CHAIN_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true"/>
+    <option name="TERNARY_OPERATION_WRAP" value="1"/>
+    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true"/>
+    <option name="FOR_STATEMENT_WRAP" value="1"/>
+    <option name="ARRAY_INITIALIZER_WRAP" value="1"/>
+    <option name="WRAP_COMMENTS" value="true"/>
+    <option name="IF_BRACE_FORCE" value="3"/>
+    <option name="DOWHILE_BRACE_FORCE" value="3"/>
+    <option name="WHILE_BRACE_FORCE" value="3"/>
+    <option name="FOR_BRACE_FORCE" value="3"/>
+    <option name="PARENT_SETTINGS_INSTALLED" value="true"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="JSON">
     <indentOptions>
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="JavaScript">
-    <option name="RIGHT_MARGIN" value="80" />
-    <option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
-    <option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
-    <option name="ALIGN_MULTILINE_FOR" value="false" />
-    <option name="CALL_PARAMETERS_WRAP" value="1" />
-    <option name="METHOD_PARAMETERS_WRAP" value="1" />
-    <option name="BINARY_OPERATION_WRAP" value="1" />
-    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
-    <option name="TERNARY_OPERATION_WRAP" value="1" />
-    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true" />
-    <option name="FOR_STATEMENT_WRAP" value="1" />
-    <option name="ARRAY_INITIALIZER_WRAP" value="1" />
-    <option name="IF_BRACE_FORCE" value="3" />
-    <option name="DOWHILE_BRACE_FORCE" value="3" />
-    <option name="WHILE_BRACE_FORCE" value="3" />
-    <option name="FOR_BRACE_FORCE" value="3" />
-    <option name="PARENT_SETTINGS_INSTALLED" value="true" />
+    <option name="RIGHT_MARGIN" value="80"/>
+    <option name="KEEP_BLANK_LINES_IN_CODE" value="1"/>
+    <option name="ALIGN_MULTILINE_PARAMETERS" value="false"/>
+    <option name="ALIGN_MULTILINE_FOR" value="false"/>
+    <option name="CALL_PARAMETERS_WRAP" value="1"/>
+    <option name="METHOD_PARAMETERS_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_WRAP" value="1"/>
+    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true"/>
+    <option name="TERNARY_OPERATION_WRAP" value="1"/>
+    <option name="TERNARY_OPERATION_SIGNS_ON_NEXT_LINE" value="true"/>
+    <option name="FOR_STATEMENT_WRAP" value="1"/>
+    <option name="ARRAY_INITIALIZER_WRAP" value="1"/>
+    <option name="IF_BRACE_FORCE" value="3"/>
+    <option name="DOWHILE_BRACE_FORCE" value="3"/>
+    <option name="WHILE_BRACE_FORCE" value="3"/>
+    <option name="FOR_BRACE_FORCE" value="3"/>
+    <option name="PARENT_SETTINGS_INSTALLED" value="true"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="PROTO">
-    <option name="RIGHT_MARGIN" value="80" />
+    <option name="RIGHT_MARGIN" value="80"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="2" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="2"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="protobuf">
-    <option name="RIGHT_MARGIN" value="80" />
+    <option name="RIGHT_MARGIN" value="80"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="2" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="2"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="Python">
-    <option name="KEEP_BLANK_LINES_IN_CODE" value="1" />
-    <option name="RIGHT_MARGIN" value="80" />
-    <option name="ALIGN_MULTILINE_PARAMETERS" value="false" />
-    <option name="PARENT_SETTINGS_INSTALLED" value="true" />
+    <option name="KEEP_BLANK_LINES_IN_CODE" value="1"/>
+    <option name="RIGHT_MARGIN" value="80"/>
+    <option name="ALIGN_MULTILINE_PARAMETERS" value="false"/>
+    <option name="PARENT_SETTINGS_INSTALLED" value="true"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="SASS">
     <indentOptions>
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="SCSS">
     <indentOptions>
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="TypeScript">
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
   </codeStyleSettings>
   <codeStyleSettings language="XML">
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="2" />
-      <option name="TAB_SIZE" value="2" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="2"/>
+      <option name="TAB_SIZE" value="2"/>
     </indentOptions>
     <arrangement>
       <rules>
@@ -224,7 +224,7 @@
             <match>
               <AND>
                 <NAME>xmlns:android</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>^$</XML_NAMESPACE>
               </AND>
             </match>
@@ -235,7 +235,7 @@
             <match>
               <AND>
                 <NAME>xmlns:.*</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>^$</XML_NAMESPACE>
               </AND>
             </match>
@@ -247,7 +247,7 @@
             <match>
               <AND>
                 <NAME>.*:id</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -258,7 +258,7 @@
             <match>
               <AND>
                 <NAME>style</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>^$</XML_NAMESPACE>
               </AND>
             </match>
@@ -269,7 +269,7 @@
             <match>
               <AND>
                 <NAME>.*</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>^$</XML_NAMESPACE>
               </AND>
             </match>
@@ -281,7 +281,7 @@
             <match>
               <AND>
                 <NAME>.*:.*Style</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -293,7 +293,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_width</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -304,7 +304,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_height</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -315,7 +315,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_weight</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -326,7 +326,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_margin</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -337,7 +337,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginTop</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -348,7 +348,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginBottom</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -359,7 +359,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginStart</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -370,7 +370,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginEnd</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -381,7 +381,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginLeft</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -392,7 +392,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_marginRight</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -403,7 +403,7 @@
             <match>
               <AND>
                 <NAME>.*:layout_.*</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -415,7 +415,7 @@
             <match>
               <AND>
                 <NAME>.*:padding</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -426,7 +426,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingTop</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -437,7 +437,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingBottom</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -448,7 +448,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingStart</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -459,7 +459,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingEnd</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -470,7 +470,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingLeft</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -481,7 +481,7 @@
             <match>
               <AND>
                 <NAME>.*:paddingRight</NAME>
-                <XML_ATTRIBUTE />
+                <XML_ATTRIBUTE/>
                 <XML_NAMESPACE>http://schemas.android.com/apk/res/android</XML_NAMESPACE>
               </AND>
             </match>
@@ -535,62 +535,62 @@
     </arrangement>
   </codeStyleSettings>
   <Objective-C>
-    <option name="INDENT_NAMESPACE_MEMBERS" value="0" />
-    <option name="INDENT_C_STRUCT_MEMBERS" value="2" />
-    <option name="INDENT_CLASS_MEMBERS" value="2" />
-    <option name="INDENT_VISIBILITY_KEYWORDS" value="1" />
-    <option name="INDENT_INSIDE_CODE_BLOCK" value="2" />
-    <option name="KEEP_STRUCTURES_IN_ONE_LINE" value="true" />
-    <option name="FUNCTION_PARAMETERS_WRAP" value="5" />
-    <option name="FUNCTION_CALL_ARGUMENTS_WRAP" value="5" />
-    <option name="TEMPLATE_CALL_ARGUMENTS_WRAP" value="5" />
-    <option name="TEMPLATE_CALL_ARGUMENTS_ALIGN_MULTILINE" value="true" />
-    <option name="ALIGN_INIT_LIST_IN_COLUMNS" value="false" />
-    <option name="SPACE_BEFORE_SUPERCLASS_COLON" value="false" />
+    <option name="INDENT_NAMESPACE_MEMBERS" value="0"/>
+    <option name="INDENT_C_STRUCT_MEMBERS" value="2"/>
+    <option name="INDENT_CLASS_MEMBERS" value="2"/>
+    <option name="INDENT_VISIBILITY_KEYWORDS" value="1"/>
+    <option name="INDENT_INSIDE_CODE_BLOCK" value="2"/>
+    <option name="KEEP_STRUCTURES_IN_ONE_LINE" value="true"/>
+    <option name="FUNCTION_PARAMETERS_WRAP" value="5"/>
+    <option name="FUNCTION_CALL_ARGUMENTS_WRAP" value="5"/>
+    <option name="TEMPLATE_CALL_ARGUMENTS_WRAP" value="5"/>
+    <option name="TEMPLATE_CALL_ARGUMENTS_ALIGN_MULTILINE" value="true"/>
+    <option name="ALIGN_INIT_LIST_IN_COLUMNS" value="false"/>
+    <option name="SPACE_BEFORE_SUPERCLASS_COLON" value="false"/>
   </Objective-C>
   <Objective-C-extensions>
-    <option name="GENERATE_INSTANCE_VARIABLES_FOR_PROPERTIES" value="ASK" />
-    <option name="RELEASE_STYLE" value="IVAR" />
-    <option name="TYPE_QUALIFIERS_PLACEMENT" value="BEFORE" />
+    <option name="GENERATE_INSTANCE_VARIABLES_FOR_PROPERTIES" value="ASK"/>
+    <option name="RELEASE_STYLE" value="IVAR"/>
+    <option name="TYPE_QUALIFIERS_PLACEMENT" value="BEFORE"/>
     <file>
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function" />
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function"/>
     </file>
     <class>
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod" />
-      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod" />
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod"/>
+      <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod"/>
     </class>
     <extensions>
-      <pair source="cc" header="h" />
-      <pair source="c" header="h" />
+      <pair header="h" source="cc"/>
+      <pair header="h" source="c"/>
     </extensions>
   </Objective-C-extensions>
   <codeStyleSettings language="ObjectiveC">
-    <option name="RIGHT_MARGIN" value="80" />
-    <option name="KEEP_BLANK_LINES_BEFORE_RBRACE" value="1" />
-    <option name="BLANK_LINES_BEFORE_IMPORTS" value="0" />
-    <option name="BLANK_LINES_AFTER_IMPORTS" value="0" />
-    <option name="BLANK_LINES_AROUND_CLASS" value="0" />
-    <option name="BLANK_LINES_AROUND_METHOD" value="0" />
-    <option name="BLANK_LINES_AROUND_METHOD_IN_INTERFACE" value="0" />
-    <option name="ALIGN_MULTILINE_BINARY_OPERATION" value="false" />
-    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
-    <option name="FOR_STATEMENT_WRAP" value="1" />
-    <option name="ASSIGNMENT_WRAP" value="1" />
+    <option name="RIGHT_MARGIN" value="80"/>
+    <option name="KEEP_BLANK_LINES_BEFORE_RBRACE" value="1"/>
+    <option name="BLANK_LINES_BEFORE_IMPORTS" value="0"/>
+    <option name="BLANK_LINES_AFTER_IMPORTS" value="0"/>
+    <option name="BLANK_LINES_AROUND_CLASS" value="0"/>
+    <option name="BLANK_LINES_AROUND_METHOD" value="0"/>
+    <option name="BLANK_LINES_AROUND_METHOD_IN_INTERFACE" value="0"/>
+    <option name="ALIGN_MULTILINE_BINARY_OPERATION" value="false"/>
+    <option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true"/>
+    <option name="FOR_STATEMENT_WRAP" value="1"/>
+    <option name="ASSIGNMENT_WRAP" value="1"/>
     <indentOptions>
-      <option name="INDENT_SIZE" value="2" />
-      <option name="CONTINUATION_INDENT_SIZE" value="4" />
+      <option name="INDENT_SIZE" value="2"/>
+      <option name="CONTINUATION_INDENT_SIZE" value="4"/>
     </indentOptions>
   </codeStyleSettings>
 </code_scheme>