azkaban-uncached
Changes
build.xml 39(+36 -3)
src/java/azkaban/utils/LogSummary.java 349(+349 -0)
src/tl/flowsummary.tl 209(+209 -0)
src/web/css/azkaban.css 2(+1 -1)
src/web/css/bootstrap-azkaban.css 4(+4 -0)
src/web/js/azkaban.exflow.view.js 12(+6 -6)
src/web/js/azkaban.flow.view.js 145(+135 -10)
src/web/js/azkaban.jobdetails.view.js 341(+341 -0)
src/web/js/dust-core-2.2.2.min.js 9(+9 -0)
Details
build.xml 39(+36 -3)
diff --git a/build.xml b/build.xml
index 86db40b..7b1479b 100644
--- a/build.xml
+++ b/build.xml
@@ -4,6 +4,7 @@
<property file="build.properties" />
<property name="base.dir" value="${basedir}" />
<property name="dist.jar.dir" value="${basedir}/dist/jars" />
+ <property name="dist.dust.dir" value="${basedir}/dist/dust" />
<property name="dist.classes.dir" value="${basedir}/dist/classes" />
<property name="dist.packages.dir" value="${basedir}/dist/packages" />
<property name="dist.web.package.dir" value="${dist.packages.dir}/azkaban-web-server" />
@@ -19,6 +20,7 @@
<property name="lib.dir" value="${basedir}/lib" />
<property name="bin.dir" value="${basedir}/bin" />
<property name="java.src.dir" value="${basedir}/src/java" />
+ <property name="dust.src.dir" value="${basedir}/src/tl" />
<property name="web.src.dir" value="${basedir}/src/web" />
<property name="sql.src.dir" value="${basedir}/src/sql" />
@@ -48,6 +50,8 @@
<target name="build" description="Compile main source tree java files">
<delete dir="${dist.classes.dir}" />
<mkdir dir="${dist.classes.dir}" />
+ <delete dir="${dist.dust.dir}" />
+ <mkdir dir="${dist.dust.dir}" />
<!-- copy non-java files to classes dir to load from classpath -->
<copy todir="${dist.classes.dir}">
@@ -61,6 +65,21 @@
<src path="${java.src.dir}" />
<classpath refid="main.classpath" />
</javac>
+
+ <!-- Compile dustjs templates -->
+ <!-- Note: Because apply does not support multiple srcfile and targetfile
+ elements, and for and foreach requires ant-contrib, we use targetfile
+ for the template name parameter and then redirect the output of dustc
+ to the final output file -->
+ <apply dir="${dust.src.dir}" executable="dustc" relative="true">
+ <mapper type="glob" from="*.tl" to="*" />
+ <targetfile prefix="--name=" />
+ <srcfile />
+ <fileset dir="${dust.src.dir}" includes="*.tl" />
+ <redirector>
+ <outputmapper id="out" type="glob" from="*.tl" to="${dist.dust.dir}/*.js" />
+ </redirector>
+ </apply>
</target>
<target name="jars" depends="build" description="Create azkaban jar">
@@ -147,17 +166,22 @@
</copy>
<!-- Copy bin files for web server only-->
- <copy todir="${dist.web.package.dir}/bin" >
+ <copy todir="${dist.web.package.dir}/bin">
<fileset dir="${web.package.dir}/bin"/>
</copy>
<!-- Copy web files -->
- <copy todir="${dist.web.package.dir}/web" >
+ <copy todir="${dist.web.package.dir}/web">
<fileset dir="${web.src.dir}" />
</copy>
+
+ <!-- Copy compiled dust templates -->
+ <copy todir="${dist.web.package.dir}/web/js">
+ <fileset dir="${dist.dust.dir}" />
+ </copy>
<!-- Copy conf create table scripts -->
- <copy todir="${dist.web.package.dir}/conf" >
+ <copy todir="${dist.web.package.dir}/conf">
<fileset dir="${web.package.dir}/conf" />
</copy>
@@ -241,6 +265,11 @@
<fileset dir="${web.src.dir}" />
</copy>
+ <!-- Copy compiled dust templates -->
+ <copy todir="${dist.solo.package.dir}/web/js">
+ <fileset dir="${dist.dust.dir}" />
+ </copy>
+
<!-- Copy sql files -->
<copy todir="${dist.solo.package.dir}/sql" >
<fileset dir="${sql.src.dir}" />
@@ -259,4 +288,8 @@
<target name="package-all" depends="package-exec-server, package-web-server, package-solo-server, package-sql-scripts" description="Create all packages">
</target>
+
+ <target name="package" depends="package-all" description="Create all packages">
+ </target>
+
</project>
src/java/azkaban/utils/LogSummary.java 349(+349 -0)
diff --git a/src/java/azkaban/utils/LogSummary.java b/src/java/azkaban/utils/LogSummary.java
new file mode 100644
index 0000000..c9e5495
--- /dev/null
+++ b/src/java/azkaban/utils/LogSummary.java
@@ -0,0 +1,349 @@
+package azkaban.utils;
+
+import azkaban.utils.FileIOUtils.LogData;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LogSummary {
+ private static final String HIVE_PARSING_START = "Parsing command: ";
+ private static final String HIVE_PARSING_END = "Parse Completed";
+ private static final String HIVE_NUM_MAP_REDUCE_JOBS_STRING = "Total MapReduce jobs = ";
+ private static final String HIVE_MAP_REDUCE_JOB_START = "Starting Job";
+ private static final String HIVE_MAP_REDUCE_JOBS_SUMMARY = "MapReduce Jobs Launched:";
+
+ // Regex to search for URLs to job details pages.
+ private static final Pattern jobTrackerUrl = Pattern.compile(
+ "https?://" + // http(s)://
+ "[-\\w\\.]+" + // domain
+ "(?::\\d+)?" + // port
+ "/[\\w/\\.]*" + // path
+ // query string
+ "\\?\\S+" +
+ "(job_\\d{12}_\\d{4,})" + // job id
+ "\\S*"
+ );
+
+ private String jobType = null;
+ private List<Pair<String,String>> commandProperties = new ArrayList<Pair<String,String>>();
+
+ private String[] pigStatTableHeaders = null;
+ private List<String[]> pigStatTableData = new ArrayList<String[]>();
+
+ private String[] pigSummaryTableHeaders = null;
+ private List<String[]> pigSummaryTableData = new ArrayList<String[]>();
+
+ private List<String> hiveQueries = new ArrayList<String>();
+
+ // Each element in hiveQueryJobs contains a list of the jobs for a query.
+ // Each job contains a list of strings of the job summary values.
+ private List<List<List<String>>> hiveQueryJobs = new ArrayList<List<List<String>>>();
+
+ public LogSummary(LogData log) {
+ if (log != null) {
+ parseLogData(log.getData());
+ }
+ }
+
+ /**
+ * Parses the raw log text: strips per-line timestamps, then extracts the
+ * command properties and, based on the detected job type, the Pig or Hive
+ * summary tables.
+ */
+ private void parseLogData(String data) {
+ // Filter out all the timestamps
+ data = data.replaceAll("(?m)^.*? - ", "");
+ String[] lines = data.split("\n");
+
+ if (parseCommand(lines)) {
+ jobType = parseJobType(lines);
+
+ // parseJobType returns null when no "Building <type> job executor"
+ // line is present in the log; guard against an NPE here.
+ if (jobType == null) {
+ return;
+ }
+
+ if (jobType.contains("pig")) {
+ parsePigJobSummary(lines);
+ parsePigJobStats(lines);
+ } else if (jobType.contains("hive")) {
+ parseHiveQueries(lines);
+ }
+ }
+ }
+
+ private String parseJobType(String[] lines) {
+ Pattern p = Pattern.compile("Building (\\S+) job executor");
+
+ for (String line : lines) {
+ Matcher m = p.matcher(line);
+ if (m.find()) {
+ return m.group(1);
+ }
+ }
+
+ return null;
+ }
+
+ private boolean parseCommand(String[] lines) {
+ int commandStartIndex = -1;
+ for (int i = 0; i < lines.length; i++) {
+ if (lines[i].startsWith("Command: ")) {
+ commandStartIndex = i;
+ break;
+ }
+ }
+
+ if (commandStartIndex != -1) {
+ String command = lines[commandStartIndex].substring(9);
+ commandProperties.add(new Pair<String,String>("Command", command));
+
+ // Parse classpath
+ Pattern p = Pattern.compile("(?:-cp|-classpath)\\s+(\\S+)");
+ Matcher m = p.matcher(command);
+ StringBuilder sb = new StringBuilder();
+ if (m.find()) {
+ sb.append(StringUtils.join((Collection<String>)Arrays.asList(m.group(1).split(":")), "<br/>"));
+ commandProperties.add(new Pair<String,String>("Classpath", sb.toString()));
+ }
+
+ // Parse environment variables
+ p = Pattern.compile("-D(\\S+)");
+ m = p.matcher(command);
+ sb = new StringBuilder();
+ while (m.find()) {
+ sb.append(m.group(1) + "<br/>");
+ }
+ if (sb.length() > 0) {
+ commandProperties.add(new Pair<String,String>("-D", sb.toString()));
+ }
+
+ // Parse memory settings
+ p = Pattern.compile("(-Xm\\S+)");
+ m = p.matcher(command);
+ sb = new StringBuilder();
+ while (m.find()) {
+ sb.append(m.group(1) + "<br/>");
+ }
+ if (sb.length() > 0) {
+ commandProperties.add(new Pair<String,String>("Memory Settings", sb.toString()));
+ }
+
+ // Parse Pig params
+ p = Pattern.compile("-param\\s+(\\S+)");
+ m = p.matcher(command);
+ sb = new StringBuilder();
+ while (m.find()) {
+ sb.append(m.group(1) + "<br/>");
+ }
+ if (sb.length() > 0) {
+ commandProperties.add(new Pair<String,String>("Params", sb.toString()));
+ }
+
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Parses the Pig job summary table, which starts with a "HadoopVersion"
+ * header row and is terminated by a blank line (or the end of the log).
+ * Populates pigSummaryTableHeaders and pigSummaryTableData.
+ */
+ private void parsePigJobSummary(String[] lines) {
+ int jobSummaryStartIndex = -1;
+ for (int i = 0; i < lines.length; i++) {
+ if (lines[i].startsWith("HadoopVersion")) {
+ jobSummaryStartIndex = i;
+ break;
+ }
+ }
+
+ if (jobSummaryStartIndex != -1) {
+ String headerLine = lines[jobSummaryStartIndex];
+ pigSummaryTableHeaders = headerLine.split("\t");
+
+ // Bounds check avoids ArrayIndexOutOfBoundsException when the table
+ // runs to the end of the log without a trailing blank line.
+ int tableRowIndex = jobSummaryStartIndex + 1;
+ String line;
+ while (tableRowIndex < lines.length && !(line = lines[tableRowIndex]).equals("")) {
+ pigSummaryTableData.add(line.split("\t"));
+ tableRowIndex++;
+ }
+ }
+ }
+
+ /**
+ * Parses the Pig Job Stats table that includes the max/min mapper and reduce times.
+ * Adds links to the job details pages on the job tracker, collected from the
+ * job tracker URLs that appear in the log before the stats table.
+ * @param lines timestamp-stripped log lines
+ */
+ private void parsePigJobStats(String[] lines) {
+ int jobStatsStartIndex = -1;
+
+ // Maps job id -> job tracker details URL seen earlier in the log.
+ Map<String, String> jobDetailUrls = new HashMap<String, String>();
+
+ for (int i = 0; i < lines.length; i++) {
+ String line = lines[i];
+ Matcher m = jobTrackerUrl.matcher(line);
+
+ if (m.find()) {
+ jobDetailUrls.put(m.group(1), m.group(0));
+ }
+ else if (line.startsWith("Job Stats (time in seconds):")) {
+ jobStatsStartIndex = i+1;
+ break;
+ }
+ }
+
+ if (jobStatsStartIndex != -1) {
+ String headerLine = lines[jobStatsStartIndex];
+ pigStatTableHeaders = headerLine.split("\t");
+
+ // Bounds check avoids ArrayIndexOutOfBoundsException when the table
+ // runs to the end of the log without a trailing blank line.
+ int tableRowIndex = jobStatsStartIndex + 1;
+ String line;
+ while (tableRowIndex < lines.length && !(line = lines[tableRowIndex]).equals("")) {
+ String[] stats = line.split("\t");
+ if (jobDetailUrls.containsKey(stats[0])) {
+ stats[0] = "<a href=\"" + jobDetailUrls.get(stats[0]) + "\">" + stats[0] + "</a>";
+ }
+ pigStatTableData.add(stats);
+ tableRowIndex++;
+ }
+ }
+ }
+
+ /**
+ * Parses each Hive query from the log (delimited by the "Parsing command:" /
+ * "Parse Completed" markers) along with the map-reduce jobs launched for it.
+ * Populates hiveQueries and hiveQueryJobs.
+ */
+ private void parseHiveQueries(String[] lines) {
+ for (int i = 0; i < lines.length;) {
+ String line = lines[i];
+ int parsingCommandIndex = line.indexOf(HIVE_PARSING_START);
+ if (parsingCommandIndex != -1) {
+ // parse query text
+ int queryStartIndex = parsingCommandIndex + HIVE_PARSING_START.length();
+ StringBuilder query = new StringBuilder(line.substring(queryStartIndex) + "\n");
+
+ i++;
+ while (i < lines.length && !(line = lines[i]).contains(HIVE_PARSING_END)) {
+ query.append(line + "\n");
+ i++;
+ }
+ String queryString = query.toString().trim().replaceAll("\n","<br/>");
+ hiveQueries.add(queryString);
+ i++;
+
+ // parse the query's Map-Reduce jobs, if any.
+ int numMRJobs = 0;
+ List<String> jobTrackerUrls = new ArrayList<String>();
+ while (i < lines.length) {
+ line = lines[i];
+ if (line.contains(HIVE_NUM_MAP_REDUCE_JOBS_STRING)) {
+ // query involves map reduce jobs.
+ // The marker was located with contains(), so it need not start at
+ // index 0; compute the offset from its actual position instead of
+ // assuming the line begins with the marker, and trim to protect
+ // Integer.parseInt from surrounding whitespace.
+ numMRJobs = Integer.parseInt(line.substring(
+ line.indexOf(HIVE_NUM_MAP_REDUCE_JOBS_STRING)
+ + HIVE_NUM_MAP_REDUCE_JOBS_STRING.length()).trim());
+ i++;
+
+ // get the job tracker URLs
+ String lastUrl = "";
+ int numJobsSeen = 0;
+ while (numJobsSeen < numMRJobs && i < lines.length) {
+ line = lines[i];
+ if (line.contains(HIVE_MAP_REDUCE_JOB_START)) {
+ Matcher m = jobTrackerUrl.matcher(line);
+ if (m.find() && !lastUrl.equals(m.group(1))) {
+ jobTrackerUrls.add(m.group(0));
+ lastUrl = m.group(1);
+ numJobsSeen++;
+ }
+ }
+ i++;
+ }
+
+ // get the map reduce jobs summary
+ while (i < lines.length) {
+ line = lines[i];
+ if (line.contains(HIVE_MAP_REDUCE_JOBS_SUMMARY)) {
+ // job summary table found
+ i++;
+
+ List<List<String>> queryJobs = new ArrayList<List<String>>();
+
+ Pattern p = Pattern.compile(
+ "Job (\\d+): Map: (\\d+) Reduce: (\\d+) HDFS Read: (\\d+) HDFS Write: (\\d+)"
+ );
+
+ int previousJob = -1;
+ numJobsSeen = 0;
+ while (numJobsSeen < numMRJobs && i < lines.length) {
+ line = lines[i];
+ Matcher m = p.matcher(line);
+ if (m.find()) {
+ int currJob = Integer.parseInt(m.group(1));
+ // Skip duplicate summary lines for the same job.
+ if (currJob == previousJob) {
+ i++;
+ continue;
+ }
+
+ List<String> job = new ArrayList<String>();
+ job.add("<a href=\"" + jobTrackerUrls.get(currJob) +
+ "\">" + currJob + "</a>");
+ job.add(m.group(2));
+ job.add(m.group(3));
+ job.add(m.group(4));
+ job.add(m.group(5));
+ queryJobs.add(job);
+ previousJob = currJob;
+ numJobsSeen++;
+ }
+ i++;
+ }
+
+ // Only record the jobs if the whole summary table was found.
+ if (numJobsSeen == numMRJobs) {
+ hiveQueryJobs.add(queryJobs);
+ }
+
+ break;
+ }
+ i++;
+ }
+ break;
+ }
+ else if (line.contains(HIVE_PARSING_START)) {
+ // Next query started before any MR jobs were announced for the
+ // current one: record a null placeholder to keep indices aligned.
+ if (numMRJobs == 0) {
+ hiveQueryJobs.add(null);
+ }
+ break;
+ }
+ i++;
+ }
+ continue;
+ }
+
+ i++;
+ }
+ return;
+ }
+
+ public String[] getPigStatTableHeaders() {
+ return pigStatTableHeaders;
+ }
+
+ public List<String[]> getPigStatTableData() {
+ return pigStatTableData;
+ }
+
+ public String[] getPigSummaryTableHeaders() {
+ return pigSummaryTableHeaders;
+ }
+
+ public List<String[]> getPigSummaryTableData() {
+ return pigSummaryTableData;
+ }
+
+ public String getJobType() {
+ return jobType;
+ }
+
+ public List<Pair<String,String>> getCommandProperties() {
+ return commandProperties;
+ }
+
+ public List<String> getHiveQueries() {
+ return hiveQueries;
+ }
+
+ public List<List<List<String>>> getHiveQueryJobs() {
+ return hiveQueryJobs;
+ }
+}
diff --git a/src/java/azkaban/webapp/servlet/ExecutorServlet.java b/src/java/azkaban/webapp/servlet/ExecutorServlet.java
index 95847a1..ad572ed 100644
--- a/src/java/azkaban/webapp/servlet/ExecutorServlet.java
+++ b/src/java/azkaban/webapp/servlet/ExecutorServlet.java
@@ -44,6 +44,7 @@ import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.user.Permission.Type;
import azkaban.utils.FileIOUtils.LogData;
+import azkaban.utils.LogSummary;
import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.session.Session;
@@ -71,7 +72,7 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
}
else if (hasParam(req, "execid")) {
if (hasParam(req, "job")) {
- handleExecutionJobPage(req, resp, session);
+ handleExecutionJobDetailsPage(req, resp, session);
}
else {
handleExecutionFlowPage(req, resp, session);
@@ -82,8 +83,8 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
}
}
- private void handleExecutionJobPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
- Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/joblogpage.vm");
+ private void handleExecutionJobDetailsPage(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
+ Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/jobdetailspage.vm");
User user = session.getUser();
int execId = getIntParam(req, "execid");
String jobId = getParam(req, "job");
@@ -96,7 +97,7 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
try {
flow = executorManager.getExecutableFlow(execId);
if (flow == null) {
- page.add("errorMsg", "Error loading executing flow " + execId + " not found.");
+ page.add("errorMsg", "Error loading executing flow " + execId + ": not found.");
page.render();
return;
}
@@ -262,6 +263,9 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
else if (ajaxName.equals("fetchExecJobLogs")) {
ajaxFetchJobLogs(req, resp, ret, session.getUser(), exFlow);
}
+ else if (ajaxName.equals("fetchExecJobSummary")) {
+ ajaxFetchJobSummary(req, resp, ret, session.getUser(), exFlow);
+ }
else if (ajaxName.equals("retryFailedJobs")) {
ajaxRestartFailed(req, resp, ret, session.getUser(), exFlow);
}
@@ -442,6 +446,53 @@ public class ExecutorServlet extends LoginAbstractAzkabanServlet {
throw new ServletException(e);
}
}
+
+ /**
+ * Gets the job summary parsed out of the job's log.
+ *
+ * @param req
+ * @param resp
+ * @param ret JSON response map to populate
+ * @param user
+ * @param exFlow
+ * @throws ServletException
+ */
+ private void ajaxFetchJobSummary(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, ExecutableFlow exFlow) throws ServletException {
+ Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
+ if (project == null) {
+ return;
+ }
+
+ String jobId = this.getParam(req, "jobId");
+ resp.setCharacterEncoding("utf-8");
+
+ try {
+ ExecutableNode node = exFlow.getExecutableNode(jobId);
+ if (node == null) {
+ ret.put("error", "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
+ return;
+ }
+
+ int attempt = this.getIntParam(req, "attempt", node.getAttempt());
+ // NOTE(review): fetches the entire log in one call; for very large logs
+ // this may be expensive — consider bounding the fetch size.
+ LogData data = executorManager.getExecutionJobLog(exFlow, jobId, 0, Integer.MAX_VALUE, attempt);
+
+ LogSummary summary = new LogSummary(data);
+ ret.put("commandProperties", summary.getCommandProperties());
+
+ // getJobType() is null when the log has no "Building <type> job
+ // executor" line (e.g. empty or unparseable log); guard against NPE.
+ String jobType = summary.getJobType();
+ if (jobType == null) {
+ return;
+ }
+
+ if (jobType.contains("pig")) {
+ ret.put("summaryTableHeaders", summary.getPigSummaryTableHeaders());
+ ret.put("summaryTableData", summary.getPigSummaryTableData());
+ ret.put("statTableHeaders", summary.getPigStatTableHeaders());
+ ret.put("statTableData", summary.getPigStatTableData());
+ } else if (jobType.contains("hive")) {
+ ret.put("hiveQueries", summary.getHiveQueries());
+ ret.put("hiveQueryJobs", summary.getHiveQueryJobs());
+ }
+ } catch (ExecutorManagerException e) {
+ throw new ServletException(e);
+ }
+ }
private void ajaxFetchFlowInfo(HttpServletRequest req, HttpServletResponse resp, HashMap<String, Object> ret, User user, String projectName, String flowId) throws ServletException {
Project project = getProjectAjaxByPermission(ret, projectName, user, Type.READ);
diff --git a/src/java/azkaban/webapp/servlet/velocity/executingflowpage.vm b/src/java/azkaban/webapp/servlet/velocity/executingflowpage.vm
index e9cf0d5..e21ca51 100644
--- a/src/java/azkaban/webapp/servlet/velocity/executingflowpage.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/executingflowpage.vm
@@ -147,7 +147,7 @@
<th class="date">End Time</th>
<th class="elapse">Elapsed</th>
<th class="status">Status</th>
- <th class="logs">Logs</th>
+ <th class="logs">Details</th>
</tr>
</thead>
<tbody id="executableBody">
diff --git a/src/java/azkaban/webapp/servlet/velocity/flowpage.vm b/src/java/azkaban/webapp/servlet/velocity/flowpage.vm
index a92c036..b9833fe 100644
--- a/src/java/azkaban/webapp/servlet/velocity/flowpage.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/flowpage.vm
@@ -22,6 +22,9 @@
#parse("azkaban/webapp/servlet/velocity/javascript.vm")
<script type="text/javascript" src="${context}/js/jqueryui/jquery-ui-1.10.1.custom.js"></script>
+
+ <script type="text/javascript" src="${context}/js/dust-core-2.2.2.min.js"></script>
+ <script type="text/javascript" src="${context}/js/flowsummary.js"></script>
<script type="text/javascript" src="${context}/js/azkaban.date.utils.js"></script>
<script type="text/javascript" src="${context}/js/azkaban.ajax.utils.js"></script>
<script type="text/javascript" src="${context}/js/azkaban.common.utils.js"></script>
@@ -97,8 +100,9 @@
<div class="row">
<div class="col-lg-8">
<ul class="nav nav-pills" id="headertabs">
- <li id="graphViewLink"><a href="#">Graph</a></li>
- <li id="executionsViewLink"><a href="#">Executions</a></li>
+ <li id="graphViewLink"><a href="#graph">Graph</a></li>
+ <li id="executionsViewLink"><a href="#executions">Executions</a></li>
+ <li id="summaryViewLink"><a href="#summary">Summary</a></li>
</ul>
</div>
<div class="col-lg-4">
@@ -108,7 +112,7 @@
<div class="clearfix"></div>
</div>
</div>
-
+
## Graph view.
#parse ("azkaban/webapp/servlet/velocity/flowgraphview.vm")
@@ -144,6 +148,13 @@
</div>
</div>
+ ## Summary view.
+
+ <div class="row">
+ <div class="col-lg-12" id="summaryView">
+ </div>
+ </div><!-- /#summaryView -->
+
<div id="contextMenu">
</div>
src/tl/flowsummary.tl 209(+209 -0)
diff --git a/src/tl/flowsummary.tl b/src/tl/flowsummary.tl
new file mode 100644
index 0000000..85c9a99
--- /dev/null
+++ b/src/tl/flowsummary.tl
@@ -0,0 +1,209 @@
+ <div class="panel panel-default">
+ <div class="panel-heading">General</div>
+ <table class="table table-striped table-bordered table-condensed table-hover">
+ <tbody>
+ <tr>
+ <td class="worksheet-key">Workflow name</td>
+ <td>{general.flowId}</td>
+ </tr>
+ <tr class="editRow">
+ <td class="worksheet-key">Workflow Purpose/Description</td>
+ <td class="editable"><span class="spanValue">{general.flowDescription}</span></td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Project name</td>
+ <td>{general.projectName}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Name of scheduled item</td>
+ <td>{general.flowId}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Workflow Hadoop User Name</td>
+ <td>{general.user}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Responsible team</td>
+ <td class="editable"><span class="spanValue">{general.team}</span></td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Responsible manager</td>
+ <td class="editable"><span class="spanValue">{general.manager}</span></td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Responsible Developer</td>
+ <td class="editable"><span class="spanValue">{general.developer}</span></td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Hadoop APIs Used</td>
+ <td>{general.apis}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Additional Hadoop APIs Used</td>
+ <td>{general.additionalApis}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Streaming Languages Used</td>
+ <td>{general.streamingLanguages}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Code Repository</td>
+ <td>{general.codeRepository}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Does this workflow perform any second, third, or higher level connection graph calculations?</td>
+ <td>{general.higherLevelGraphOperations}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Can this workflow be run with an empty <strong>/jobs</strong> directory</td>
+ <td>{general.emptyJobsDirectory}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">If not, why not?</td>
+ <td>{general.emptyJobsDirectoryReason}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">How does the result get to front-end servers?</td>
+ <td>{general.toFrontEnd}</td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Scheduling</div>
+ <table class="table table-striped table-bordered table-condensed table-hover">
+ <tbody>
+ <tr>
+ <td class="worksheet-key">Max Map Slots from Largest Job</td>
+ <td>{scheduling.maxMapSlots}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Max Reduce Slots from Largest Job</td>
+ <td>{scheduling.maxReduceSlots}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Total Reduce Slots from All Jobs</td>
+ <td>{scheduling.totalReduceSlots}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Total Number of Jobs</td>
+ <td>{scheduling.numJobs}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Longest Task Time</td>
+ <td>{scheduling.longestTaskTime}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Required Schedule</td>
+ <td>{scheduling.schedule}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Launch Time</td>
+ <td>{scheduling.launchTime}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Total Workflow Run Time (hours)</td>
+ <td>{scheduling.totalFlowTime}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Expected Time of Completion</td>
+ <td>{scheduling.expectedCompletionTime}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Max Permitted Delay</td>
+ <td>{scheduling.maxPermittedDelay}</td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Resources</div>
+ <table class="table table-striped table-bordered table-condensed table-hover">
+ <tbody>
+ <tr>
+ <td class="worksheet-key">Is any of the code specifically multi-threaded?</td>
+ <td colspan="3">{resources.multithreaded}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Do you 'fat jar' any hadoop-core jars?</td>
+ <td colspan="3">{resources.fatJar}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Which job has the largest spill count?</td>
+ <td>{resources.largestSpill.job}</td>
+ <td class="worksheet-key">Largest spill count for any given task?</td>
+ <td>{resources.largestSpill.count}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Is there a distributed cache in use?</td>
+ <td>{resources.distributedCache.using}</td>
+ <td class="worksheet-key">How big is the distributed cache?</td>
+ <td>{resources.distributedCache.size}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Size of largest <code>-Xmx</code> value?</td>
+ <td>{resources.largestXmx.size}</td>
+ <td class="worksheet-key">If this is above 1G, please explain why</td>
+ <td>{resources.largestXmx.reason}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Do any jobs use <code>-Xms</code>?</td>
+ <td>{resources.xms.using}</td>
+ <td class="worksheet-key">If so, why?</td>
+ <td>{resources.xms.reason}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Is <em>intermediate compression</em> specifically turned on?</td>
+ <td>{resources.intermediateCompression.on}</td>
+ <td class="worksheet-key">If so, which codec</td>
+ <td>{resources.intermediateCompression.codec}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Are there combiners in use?</td>
+ <td colspan="3">{resources.combiners}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Size of largest <code>mapred.job.map.memory.mb</code></td>
+ <td>{resources.largestMapredJobMapMemoryMb.size}</td>
+ <td class="worksheet-key">Used by job</td>
+ <td>{resources.largestMapredJobMapMemoryMb.job}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Size of largest <code>mapred.job.reduce.memory.mb</code></td>
+ <td>{resources.largestMapredJobReduceMemoryMb.size}</td>
+ <td class="worksheet-key">Used by job</td>
+ <td>{resources.largestMapredJobReduceMemoryMb.job}</td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
+
+ <div class="panel panel-default">
+ <div class="panel-heading">Input/Output</div>
+ <table class="table table-striped table-bordered table-condensed table-hover">
+ <tbody>
+ <tr>
+ <td class="worksheet-key">List of input HDFS file paths</td>
+ <td>{io.hdfsPaths}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Number of files generated (hadoop dfs -count)</td>
+ <td>{io.hdfsFileCount}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Average size of files generated in GB</td>
+ <td>{io.averageFileSize}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Size of intermediate data (content of /jobs - output going to production) in GB on HDFS</td>
+ <td>{io.intermediateFileSize}</td>
+ </tr>
+ <tr>
+ <td class="worksheet-key">Size of final output data in GB on HDFS</td>
+ <td>{io.finalOutputSize}</td>
+ </tr>
+ </tbody>
+ </table>
+ </div>
src/web/css/azkaban.css 2(+1 -1)
diff --git a/src/web/css/azkaban.css b/src/web/css/azkaban.css
index aadf527..41e0af3 100644
--- a/src/web/css/azkaban.css
+++ b/src/web/css/azkaban.css
@@ -2422,7 +2422,7 @@ span.sublabel {
width: 100px;
}
-.executionInfo table th.logs {
+.executionInfo table th.details {
width: 10px;
}
src/web/css/bootstrap-azkaban.css 4(+4 -0)
diff --git a/src/web/css/bootstrap-azkaban.css b/src/web/css/bootstrap-azkaban.css
index daaacd6..72faaa9 100644
--- a/src/web/css/bootstrap-azkaban.css
+++ b/src/web/css/bootstrap-azkaban.css
@@ -13,3 +13,7 @@
.flow-expander {
cursor: pointer;
}
+
+table .worksheet-key {
+ width: 25%;
+}
src/web/js/azkaban.exflow.view.js 12(+6 -6)
diff --git a/src/web/js/azkaban.exflow.view.js b/src/web/js/azkaban.exflow.view.js
index b571b56..0d70f37 100644
--- a/src/web/js/azkaban.exflow.view.js
+++ b/src/web/js/azkaban.exflow.view.js
@@ -460,7 +460,7 @@ azkaban.ExecutionListView = Backbone.View.extend({
var tdEnd = document.createElement("td");
var tdElapse = document.createElement("td");
var tdStatus = document.createElement("td");
- var tdLog = document.createElement("td");
+ var tdDetails = document.createElement("td");
$(tr).append(tdName);
$(tr).append(tdTimeline);
@@ -468,7 +468,7 @@ azkaban.ExecutionListView = Backbone.View.extend({
$(tr).append(tdEnd);
$(tr).append(tdElapse);
$(tr).append(tdStatus);
- $(tr).append(tdLog);
+ $(tr).append(tdDetails);
$(tr).attr("id", node.id + "-row");
$(tdTimeline).attr("id", node.id + "-timeline");
$(tdStart).attr("id", node.id + "-start");
@@ -507,10 +507,10 @@ azkaban.ExecutionListView = Backbone.View.extend({
var a = document.createElement("a");
$(a).attr("href", logURL);
$(a).attr("id", node.id + "-log-link");
- $(a).text("Log");
- $(tdLog).addClass("logLink");
- $(tdLog).append(a);
-
+ $(a).text("Details");
+ $(tdDetails).addClass("details");
+ $(tdDetails).append(a);
+
executingBody.append(tr);
}
});
src/web/js/azkaban.flow.view.js 145(+135 -10)
diff --git a/src/web/js/azkaban.flow.view.js b/src/web/js/azkaban.flow.view.js
index c08bd5e..8d8eb6e 100644
--- a/src/web/js/azkaban.flow.view.js
+++ b/src/web/js/azkaban.flow.view.js
@@ -31,11 +31,12 @@ var statusStringMap = {
var handleJobMenuClick = function(action, el, pos) {
var jobid = el[0].jobid;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobid;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" +
+ flowId + "&job=" + jobid;
if (action == "open") {
window.location.href = requestURL;
}
- else if(action == "openwindow") {
+ else if (action == "openwindow") {
window.open(requestURL);
}
}
@@ -44,9 +45,10 @@ var flowTabView;
azkaban.FlowTabView = Backbone.View.extend({
events: {
"click #graphViewLink": "handleGraphLinkClick",
- "click #executionsViewLink": "handleExecutionLinkClick"
+ "click #executionsViewLink": "handleExecutionLinkClick",
+ "click #summaryViewLink": "handleSummaryLinkClick"
},
-
+
initialize: function(settings) {
var selectedView = settings.selectedView;
if (selectedView == "executions") {
@@ -64,19 +66,33 @@ azkaban.FlowTabView = Backbone.View.extend({
handleGraphLinkClick: function(){
$("#executionsViewLink").removeClass("active");
$("#graphViewLink").addClass("active");
+ $('#summaryViewLink').removeClass('active');
$("#executionsView").hide();
$("#graphView").show();
+ $('#summaryView').hide();
},
handleExecutionLinkClick: function() {
$("#graphViewLink").removeClass("active");
$("#executionsViewLink").addClass("active");
+ $('#summaryViewLink').removeClass('active');
$("#graphView").hide();
$("#executionsView").show();
+ $('#summaryView').hide();
executionModel.trigger("change:view");
- }
+ },
+
+ handleSummaryLinkClick: function() {
+ $('#graphViewLink').removeClass('active');
+ $('#executionsViewLink').removeClass('active');
+ $('#summaryViewLink').addClass('active');
+
+ $('#graphView').hide();
+ $('#executionsView').hide();
+ $('#summaryView').show();
+ }
});
var jobListView;
@@ -247,7 +263,6 @@ azkaban.ExecutionsView = Backbone.View.extend({
if (this.init) {
return;
}
-
console.log("init");
this.handlePageChange(evt);
this.init = true;
@@ -277,10 +292,108 @@ azkaban.ExecutionsView = Backbone.View.extend({
}
});
+var summaryView;
+azkaban.SummaryView = Backbone.View.extend({
+ events: {
+ "click": "closeEditingTarget",
+ "click table .editable": "handleEditField"
+ },
+
+ initialize: function(settings) {
+ console.log("summaryView initialize");
+ var general = {
+ flowName: "",
+ flowDescription: "",
+ projectName: projectName,
+ flowId: flowId
+ };
+
+ var scheduling = {};
+ var resources = {};
+ var io = {};
+
+ this.model.bind('change:view', this.handleChangeView, this);
+ this.model.bind('render', this.render, this);
+ this.model.set({
+ 'general': general,
+ 'scheduling': scheduling,
+ 'resources': resources,
+ 'io': io
+ });
+ this.model.trigger('render');
+ },
+
+ handleChangeView: function(evt) {
+ console.log("summaryView handleChangeView");
+ },
+
+ handleEditField: function(evt) {
+ var curTarget = evt.currentTarget;
+ console.log("summaryView handleEditField");
+ if (this.editingTarget != curTarget) {
+ this.closeEditingTarget(evt);
+
+ var text = $(curTarget).children('.spanValue').text();
+ $(curTarget).empty();
+
+ var input = document.createElement('input');
+ $(input).attr('type', 'text');
+ $(input).css('width', '100%');
+ $(input).val(text);
+
+ $(curTarget).addClass('editing');
+ $(curTarget).append(input);
+ $(input).focus();
+ var obj = this;
+ $(input).keypress(function(evt) {
+ if (evt.which == 13) {
+ obj.closeEditingTarget(evt);
+ }
+ });
+ this.editingTarget = curTarget;
+ }
+ evt.preventDefault();
+ evt.stopPropagation();
+ },
+
+ closeEditingTarget: function(evt) {
+ console.log("summaryView closeEditingTarget");
+ if (this.editingTarget != null &&
+ this.editingTarget != evt.target &&
+ this.editingTarget != evt.target.myparent) {
+ var input = $(this.editingTarget).children("input")[0];
+ var text = $(input).val();
+ $(input).remove();
+
+ var valueData = document.createElement("span");
+ $(valueData).addClass("spanValue");
+ $(valueData).text(text);
+
+ $(this.editingTarget).removeClass("editing");
+ $(this.editingTarget).append(valueData);
+ valueData.myparent = this.editingTarget;
+ this.editingTarget = null;
+ }
+ },
+
+ render: function(evt) {
+ console.log("summaryView render");
+ // Include 'io' as well: the model sets it in initialize() and the
+ // template's Input/Output section reads {io.*}; omitting it left that
+ // section's placeholders unresolved.
+ var data = {
+ general: this.model.get('general'),
+ scheduling: this.model.get('scheduling'),
+ resources: this.model.get('resources'),
+ io: this.model.get('io')
+ };
+ dust.render("flowsummary", data, function(err, out) {
+ $('#summaryView').html(out);
+ });
+ }
+});
+
var exNodeClickCallback = function(event) {
console.log("Node clicked callback");
var jobId = event.currentTarget.jobid;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" +
+ flowId + "&job=" + jobId;
var menu = [
{title: "Open Job...", callback: function() {window.location.href=requestURL;}},
@@ -293,7 +406,8 @@ var exNodeClickCallback = function(event) {
var exJobClickCallback = function(event) {
console.log("Node clicked callback");
var jobId = event.currentTarget.jobid;
- var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" + flowId + "&job=" + jobId;
+ var requestURL = contextURL + "/manager?project=" + projectName + "&flow=" +
+ flowId + "&job=" + jobId;
var menu = [
{title: "Open Job...", callback: function() {window.location.href=requestURL;}},
@@ -326,6 +440,10 @@ azkaban.GraphModel = Backbone.Model.extend({});
var executionModel;
azkaban.ExecutionModel = Backbone.Model.extend({});
+
+var summaryModel;
+azkaban.SummaryModel = Backbone.Model.extend({});
+
var mainSvgGraphView;
$(function() {
@@ -336,9 +454,14 @@ $(function() {
el: $('#executionsView'),
model: executionModel
});
+ summaryModel = new azkaban.SummaryModel();
+ summaryView = new azkaban.SummaryView({
+ el: $('#summaryView'),
+ model: summaryModel
+ });
flowTabView = new azkaban.FlowTabView({
el: $('#headertabs'),
- selectedView: selected
+ selectedView: selected
});
graphModel = new azkaban.GraphModel();
@@ -412,6 +535,9 @@ $(function() {
if (hash == "#executions") {
flowTabView.handleExecutionLinkClick();
}
+ if (hash == "#summary") {
+ flowTabView.handleSummaryLinkClick();
+ }
else if (hash == "#graph") {
// Redundant, but we may want to change the default.
selected = "graph";
@@ -429,6 +555,5 @@ $(function() {
}
}
};
-
$.get(requestURL, requestData, successHandler, "json");
});
src/web/js/azkaban.jobdetails.view.js 341(+341 -0)
diff --git a/src/web/js/azkaban.jobdetails.view.js b/src/web/js/azkaban.jobdetails.view.js
new file mode 100644
index 0000000..3f2b7a2
--- /dev/null
+++ b/src/web/js/azkaban.jobdetails.view.js
@@ -0,0 +1,341 @@
+/*
+ * Copyright 2012 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+$.namespace('azkaban');
+
+// Backing model for the job log pane; stores "offset" (bytes fetched so far)
+// and the accumulated "log" text.
+var logModel;
+azkaban.LogModel = Backbone.Model.extend({});
+
+// View that fetches and displays a job attempt's execution log, paging through
+// it in 50 KB chunks. NOTE(review): relies on page-level globals contextURL,
+// execId, jobId and attempt — confirm they are defined by the enclosing page.
+var jobLogView;
+azkaban.JobLogView = Backbone.View.extend({
+ events: {
+ "click #updateLogBtn" : "handleUpdate"
+ },
+ initialize: function(settings) {
+ this.model.set({"offset": 0});
+ this.handleUpdate();
+ },
+ // Pages through the log with blocking requests (async:false below) so that
+ // chunks are appended strictly in order and `finished` is updated before
+ // the next loop iteration. Gives up after ~10s of loading and tells the
+ // user to click Refresh, which resumes from the offset stored in the model.
+ handleUpdate: function(evt) {
+ var requestURL = contextURL + "/executor";
+ var model = this.model;
+ var finished = false;
+
+ var date = new Date();
+ var startTime = date.getTime();
+
+ while (!finished) {
+ // Resume from wherever the previous fetch (or a prior call) stopped.
+ var offset = this.model.get("offset");
+ var requestData = {
+ "execid": execId,
+ "jobId": jobId,
+ "ajax":"fetchExecJobLogs",
+ "offset": offset,
+ "length": 50000,
+ "attempt": attempt
+ };
+
+ var successHandler = function(data) {
+ console.log("fetchLogs");
+ if (data.error) {
+ console.log(data.error);
+ finished = true;
+ }
+ else if (data.length == 0) {
+ // Reached the current end of the log.
+ finished = true;
+ }
+ else {
+ // Stop (but still render this chunk) if loading has taken >10s.
+ var date = new Date();
+ var endTime = date.getTime();
+ if ((endTime - startTime) > 10000) {
+ finished = true;
+ showDialog("Alert","The log is taking a long time to finish loading. Azkaban has stopped loading them. Please click Refresh to restart the load.");
+ }
+
+ // Linkify http(s) URLs. The text/html round-trip below lets the
+ // browser escape the raw log before anchor tags are injected.
+ var re = /(https?:\/\/(([-\w\.]+)+(:\d+)?(\/([\w/_\.]*(\?\S+)?)?)?))/g;
+ var log = $("#logSection").text();
+ if (!log) {
+ log = data.data;
+ }
+ else {
+ log += data.data;
+ }
+
+ var newOffset = data.offset + data.length;
+ $("#logSection").text(log);
+ log = $("#logSection").html();
+ log = log.replace(re, "<a href=\"$1\" title=\"\">$1</a>");
+ $("#logSection").html(log);
+
+ model.set({"offset": newOffset, "log": log});
+ // Keep the viewer scrolled to the newest output.
+ $(".logViewer").scrollTop(9999);
+ }
+ }
+
+ // Synchronous on purpose: the while-loop depends on `finished` and the
+ // model offset being updated before the next iteration starts.
+ $.ajax({
+ url: requestURL,
+ type: "get",
+ async: false,
+ data: requestData,
+ dataType: "json",
+ error: function(data) {
+ console.log(data);
+ finished = true;
+ },
+ success: successHandler
+ });
+ }
+ }
+});
+
+var summaryModel;
+azkaban.SummaryModel = Backbone.Model.extend({});
+
+var jobSummaryView;
+azkaban.JobSummaryView = Backbone.View.extend({
+ events: {
+ "click #updateSummaryBtn" : "handleUpdate"
+ },
+ initialize: function(settings) {
+ this.handleUpdate();
+ },
+ handleUpdate: function(evt) {
+ var requestURL = contextURL + "/executor";
+ var model = this.model;
+ var self = this;
+
+ var requestData = {
+ "execid": execId,
+ "jobId": jobId,
+ "ajax":"fetchExecJobSummary",
+ "attempt": attempt
+ };
+
+ $.ajax({
+ url: requestURL,
+ dataType: "json",
+ data: requestData,
+ error: function(data) {
+ console.log(data);
+ },
+ success: function(data) {
+ console.log("fetchSummary");
+ if (data.error) {
+ console.log(data.error);
+ }
+ else {
+ self.renderCommandTable(data.commandProperties);
+ self.renderJobTable(data.summaryTableHeaders, data.summaryTableData, "summary");
+ self.renderJobTable(data.statTableHeaders, data.statTableData, "stats");
+ self.renderHiveTable(data.hiveQueries, data.hiveQueryJobs);
+ }
+ }
+ });
+ },
+ renderCommandTable: function(commandProperties) {
+ if (commandProperties) {
+ var commandTable = $("#commandTable");
+
+ for (var i = 0; i < commandProperties.length; i++) {
+ var prop = commandProperties[i];
+ var tr = document.createElement("tr");
+ var name = document.createElement("td");
+ var value = document.createElement("td");
+ $(name).html("<b>" + prop.first + "</b>");
+ $(value).html(prop.second);
+ $(tr).append(name);
+ $(tr).append(value);
+ commandTable.append(tr);
+ }
+ }
+ },
+ renderJobTable: function(headers, data, prefix) {
+ if (headers) {
+ // Add table headers
+ var header = $("#" + prefix + "Header");
+ var tr = document.createElement("tr");
+ var i;
+ for (i = 0; i < headers.length; i++) {
+ var th = document.createElement("th");
+ $(th).text(headers[i]);
+ $(tr).append(th);
+ }
+ header.append(tr);
+
+ // Add table body
+ var body = $("#" + prefix + "Body");
+ for (i = 0; i < data.length; i++) {
+ tr = document.createElement("tr");
+ var row = data[i];
+ for (var j = 0; j < row.length; j++) {
+ var td = document.createElement("td");
+ if (j == 0) {
+ // first column is a link to job details page
+ $(td).html(row[j]);
+ } else {
+ $(td).text(row[j]);
+ }
+ $(tr).append(td);
+ }
+ body.append(tr);
+ }
+ } else {
+ $("#job" + prefix).hide();
+ }
+ },
+ renderHiveTable: function(queries, queryJobs) {
+ if (queries) {
+ // Set up table column headers
+ var header = $("#hiveTableHeader");
+ var tr = document.createElement("tr");
+ var headers = ["Query","Job","Map","Reduce","HDFS Read","HDFS Write"];
+ var i;
+
+ for (i = 0; i < headers.length; i++) {
+ var th = document.createElement("th");
+ $(th).text(headers[i]);
+ $(tr).append(th);
+ }
+ header.append(tr);
+
+ // Construct table body
+ var body = $("#hiveTableBody");
+ for (i = 0; i < queries.length; i++) {
+ // new query
+ tr = document.createElement("tr");
+ var td = document.createElement("td");
+ $(td).html("<b>" + queries[i] + "</b>");
+ $(tr).append(td);
+
+ var jobs = queryJobs[i];
+ if (jobs != null) {
+ // add first job for this query
+ var jobValues = jobs[0];
+ var j;
+ for (j = 0; j < jobValues.length; j++) {
+ td = document.createElement("td");
+ $(td).html(jobValues[j]);
+ $(tr).append(td);
+ }
+ body.append(tr);
+
+ // add remaining jobs for this query
+ for (j = 1; j < jobs.length; j++) {
+ jobValues = jobs[j];
+ tr = document.createElement("tr");
+
+ // add empty cell for query column
+ td = document.createElement("td");
+ $(td).html(" ");
+ $(tr).append(td);
+
+ // add job values
+ for (var k = 0; k < jobValues.length; k++) {
+ td = document.createElement("td");
+ $(td).html(jobValues[k]);
+ $(tr).append(td);
+ }
+ body.append(tr);
+ }
+
+ } else {
+ body.append(tr);
+ }
+ }
+ } else {
+ $("#hiveTable").hide();
+ }
+ }
+});
+
+var jobTabView;
+azkaban.JobTabView = Backbone.View.extend({
+ events: {
+ 'click #jobSummaryViewLink': 'handleJobSummaryViewLinkClick',
+ 'click #jobLogViewLink': 'handleJobLogViewLinkClick'
+ },
+
+ initialize: function(settings) {
+ var selectedView = settings.selectedView;
+ if (selectedView == 'joblog') {
+ this.handleJobLogViewLinkClick();
+ }
+ else {
+ this.handleJobSummaryViewLinkClick();
+ }
+ },
+
+ render: function() {
+ },
+
+ handleJobLogViewLinkClick: function() {
+ $('#jobSummaryViewLink').removeClass('active');
+ $('#jobSummaryView').hide();
+ $('#jobLogViewLink').addClass('active');
+ $('#jobLogView').show();
+ },
+
+ handleJobSummaryViewLinkClick: function() {
+ $('#jobSummaryViewLink').addClass('active');
+ $('#jobSummaryView').show();
+ $('#jobLogViewLink').removeClass('active');
+ $('#jobLogView').hide();
+ },
+});
+
+var showDialog = function(title, message) {
+ $('#messageTitle').text(title);
+ $('#messageBox').text(message);
+ $('#messageDialog').modal({
+ closeHTML: "<a href='#' title='Close' class='modal-close'>x</a>",
+ position: ["20%",],
+ containerId: 'confirm-container',
+ containerCss: {
+ 'height': '220px',
+ 'width': '565px'
+ },
+ onShow: function (dialog) {
+ }
+ });
+}
+
+$(function() {
+ var selected;
+ logModel = new azkaban.LogModel();
+ jobLogView = new azkaban.JobLogView({
+ el: $('#jobLogView'),
+ model: logModel
+ });
+
+ summaryModel = new azkaban.SummaryModel();
+ jobSummaryView = new azkaban.JobSummaryView({
+ el: $('#jobSummaryView'),
+ model: summaryModel
+ });
+
+ jobTabView = new azkaban.JobTabView({
+ el: $('#headertabs')
+ });
+
+ if (window.location.hash) {
+ var hash = window.location.hash;
+ if (hash == '#joblog') {
+ jobTabView.handleJobLogViewLinkClick();
+ }
+ else if (hash == '#jobsummary') {
+ jobTabView.handleJobSummaryViewLinkClick();
+ }
+ }
+});
src/web/js/dust-core-2.2.2.min.js 9(+9 -0)
diff --git a/src/web/js/dust-core-2.2.2.min.js b/src/web/js/dust-core-2.2.2.min.js
new file mode 100644
index 0000000..3606333
--- /dev/null
+++ b/src/web/js/dust-core-2.2.2.min.js
@@ -0,0 +1,9 @@
+//
+// Dust - Asynchronous Templating v2.2.2
+// http://akdubya.github.com/dustjs
+//
+// Copyright (c) 2010, Aleksander Williams
+// Released under the MIT License.
+//
+
+function getGlobal(){return function(){return this.dust}.call(null)}var dust={};(function(dust){function Context(e,t,n,r){this.stack=e,this.global=t,this.blocks=n,this.templateName=r}function Stack(e,t,n,r){this.tail=t,this.isObject=!dust.isArray(e)&&e&&typeof e=="object",this.head=e,this.index=n,this.of=r}function Stub(e){this.head=new Chunk(this),this.callback=e,this.out=""}function Stream(){this.head=new Chunk(this)}function Chunk(e,t,n){this.root=e,this.next=t,this.data=[],this.flushable=!1,this.taps=n}function Tap(e,t){this.head=e,this.tail=t}if(!dust)return;var ERROR="ERROR",WARN="WARN",INFO="INFO",DEBUG="DEBUG",levels=[DEBUG,INFO,WARN,ERROR],logger=function(){};dust.isDebug=!1,dust.debugLevel=INFO,typeof window!="undefined"&&window&&window.console&&window.console.log?logger=window.console.log:typeof console!="undefined"&&console&&console.log&&(logger=console.log),dust.log=function(e,t){var t=t||INFO;dust.isDebug&&levels.indexOf(t)>=levels.indexOf(dust.debugLevel)&&(dust.logQueue||(dust.logQueue=[]),dust.logQueue.push({message:e,type:t}),logger.call(console||window.console,"[DUST "+t+"]: "+e))},dust.onError=function(e,t){dust.log(e.message||e,ERROR);if(dust.isDebug)throw e;return t},dust.helpers={},dust.cache={},dust.register=function(e,t){if(!e)return;dust.cache[e]=t},dust.render=function(e,t,n){var r=(new Stub(n)).head;try{dust.load(e,r,Context.wrap(t,e)).end()}catch(i){dust.onError(i,r)}},dust.stream=function(e,t){var n=new Stream;return dust.nextTick(function(){try{dust.load(e,n.head,Context.wrap(t,e)).end()}catch(r){dust.onError(r,n.head)}}),n},dust.renderSource=function(e,t,n){return dust.compileFn(e)(t,n)},dust.compileFn=function(e,t){var n=dust.loadSource(dust.compile(e,t));return function(e,r){var i=r?new Stub(r):new Stream;return dust.nextTick(function(){typeof n=="function"?n(i.head,Context.wrap(e,t)).end():dust.onError(new Error("Template ["+t+"] cannot be resolved to a Dust function"))}),i}},dust.load=function(e,t,n){var r=dust.cache[e];return 
r?r(t,n):dust.onLoad?t.map(function(t){dust.onLoad(e,function(r,i){if(r)return t.setError(r);dust.cache[e]||dust.loadSource(dust.compile(i,e)),dust.cache[e](t,n).end()})}):t.setError(new Error("Template Not Found: "+e))},dust.loadSource=function(source,path){return eval(source)},Array.isArray?dust.isArray=Array.isArray:dust.isArray=function(e){return Object.prototype.toString.call(e)==="[object Array]"},dust.nextTick=function(){return typeof process!="undefined"?process.nextTick:function(e){setTimeout(e,0)}}(),dust.isEmpty=function(e){return dust.isArray(e)&&!e.length?!0:e===0?!1:!e},dust.filter=function(e,t,n){if(n)for(var r=0,i=n.length;r<i;r++){var s=n[r];s==="s"?(t=null,dust.log("Using unescape filter on ["+e+"]",DEBUG)):typeof dust.filters[s]=="function"?e=dust.filters[s](e):dust.onError(new Error("Invalid filter ["+s+"]"))}return t&&(e=dust.filters[t](e)),e},dust.filters={h:function(e){return dust.escapeHtml(e)},j:function(e){return dust.escapeJs(e)},u:encodeURI,uc:encodeURIComponent,js:function(e){return JSON?JSON.stringify(e):(dust.log("JSON is undefined. JSON stringify has not been used on ["+e+"]",WARN),e)},jp:function(e){return JSON?JSON.parse(e):(dust.log("JSON is undefined. 
JSON parse has not been used on ["+e+"]",WARN),e)}},dust.makeBase=function(e){return new Context(new Stack,e)},Context.wrap=function(e,t){return e instanceof Context?e:new Context(new Stack(e),{},null,t)},Context.prototype.get=function(e,t){return typeof e=="string"&&(e[0]==="."&&(t=!0,e=e.substr(1)),e=e.split(".")),this._get(t,e)},Context.prototype._get=function(e,t){var n=this.stack,r=1,i,s,o,u;dust.log("Searching for reference [{"+t.join(".")+"}] in template ["+this.getTemplateName()+"]",DEBUG),s=t[0],o=t.length;if(e&&o===0)u=n,n=n.head;else{if(!e){while(n){if(n.isObject){u=n.head,i=n.head[s];if(i!==undefined)break}n=n.tail}i!==undefined?n=i:n=this.global?this.global[s]:undefined}else n=n.head[s];while(n&&r<o)u=n,n=n[t[r]],r++}if(typeof n=="function"){var a=function(){return n.apply(u,arguments)};return a.isFunction=!0,a}return n===undefined&&dust.log("Cannot find the value for reference [{"+t.join(".")+"}] in template ["+this.getTemplateName()+"]"),n},Context.prototype.getPath=function(e,t){return this._get(e,t)},Context.prototype.push=function(e,t,n){return new Context(new Stack(e,this.stack,t,n),this.global,this.blocks,this.getTemplateName())},Context.prototype.rebase=function(e){return new Context(new Stack(e),this.global,this.blocks,this.getTemplateName())},Context.prototype.current=function(){return this.stack.head},Context.prototype.getBlock=function(e,t,n){if(typeof e=="function"){var r=new Chunk;e=e(r,this).data.join("")}var i=this.blocks;if(!i){dust.log("No blocks for context[{"+e+"}] in template ["+this.getTemplateName()+"]",DEBUG);return}var s=i.length,o;while(s--){o=i[s][e];if(o)return o}},Context.prototype.shiftBlocks=function(e){var t=this.blocks,n;return e?(t?n=t.concat([e]):n=[e],new Context(this.stack,this.global,n,this.getTemplateName())):this},Context.prototype.getTemplateName=function(){return this.templateName},Stub.prototype.flush=function(){var e=this.head;while(e){if(!e.flushable){if(e.error){this.callback(e.error),dust.onError(new 
Error("Chunk error ["+e.error+"] thrown. Ceasing to render this template.")),this.flush=function(){};return}return}this.out+=e.data.join(""),e=e.next,this.head=e}this.callback(null,this.out)},Stream.prototype.flush=function(){var e=this.head;while(e){if(!e.flushable){if(e.error){this.emit("error",e.error),dust.onError(new Error("Chunk error ["+e.error+"] thrown. Ceasing to render this template.")),this.flush=function(){};return}return}this.emit("data",e.data.join("")),e=e.next,this.head=e}this.emit("end")},Stream.prototype.emit=function(e,t){if(!this.events)return dust.log("No events to emit",INFO),!1;var n=this.events[e];if(!n)return dust.log("Event type ["+e+"] does not exist",WARN),!1;if(typeof n=="function")n(t);else if(dust.isArray(n)){var r=n.slice(0);for(var i=0,s=r.length;i<s;i++)r[i](t)}else dust.onError(new Error("Event Handler ["+n+"] is not of a type that is handled by emit"))},Stream.prototype.on=function(e,t){return this.events||(this.events={}),this.events[e]?typeof this.events[e]=="function"?this.events[e]=[this.events[e],t]:this.events[e].push(t):(dust.log("Event type ["+e+"] does not exist. Using just the specified callback.",WARN),t?this.events[e]=t:dust.log("Callback for type ["+e+"] does not exist. 
Listener not registered.",WARN)),this},Stream.prototype.pipe=function(e){return this.on("data",function(t){try{e.write(t,"utf8")}catch(n){dust.onError(n,e.head)}}).on("end",function(){try{return e.end()}catch(t){dust.onError(t,e.head)}}).on("error",function(t){e.error(t)}),this},Chunk.prototype.write=function(e){var t=this.taps;return t&&(e=t.go(e)),this.data.push(e),this},Chunk.prototype.end=function(e){return e&&this.write(e),this.flushable=!0,this.root.flush(),this},Chunk.prototype.map=function(e){var t=new Chunk(this.root,this.next,this.taps),n=new Chunk(this.root,t,this.taps);return this.next=n,this.flushable=!0,e(n),t},Chunk.prototype.tap=function(e){var t=this.taps;return t?this.taps=t.push(e):this.taps=new Tap(e),this},Chunk.prototype.untap=function(){return this.taps=this.taps.tail,this},Chunk.prototype.render=function(e,t){return e(this,t)},Chunk.prototype.reference=function(e,t,n,r){if(typeof e=="function"){e.isFunction=!0,e=e.apply(t.current(),[this,t,null,{auto:n,filters:r}]);if(e instanceof Chunk)return e}return dust.isEmpty(e)?this:this.write(dust.filter(e,n,r))},Chunk.prototype.section=function(e,t,n,r){if(typeof e=="function"){e=e.apply(t.current(),[this,t,n,r]);if(e instanceof Chunk)return e}var i=n.block,s=n["else"];r&&(t=t.push(r));if(dust.isArray(e)){if(i){var o=e.length,u=this;if(o>0){t.stack.head&&(t.stack.head.$len=o);for(var a=0;a<o;a++)t.stack.head&&(t.stack.head.$idx=a),u=i(u,t.push(e[a],a,o));return t.stack.head&&(t.stack.head.$idx=undefined,t.stack.head.$len=undefined),u}if(s)return s(this,t)}}else if(e===!0){if(i)return i(this,t)}else if(e||e===0){if(i)return i(this,t.push(e))}else if(s)return s(this,t);return dust.log("Not rendering section (#) block in template ["+t.getTemplateName()+"], because above key was not found",DEBUG),this},Chunk.prototype.exists=function(e,t,n){var r=n.block,i=n["else"];if(!dust.isEmpty(e)){if(r)return r(this,t)}else if(i)return i(this,t);return dust.log("Not rendering exists (?) 
block in template ["+t.getTemplateName()+"], because above key was not found",DEBUG),this},Chunk.prototype.notexists=function(e,t,n){var r=n.block,i=n["else"];if(dust.isEmpty(e)){if(r)return r(this,t)}else if(i)return i(this,t);return dust.log("Not rendering not exists (^) block check in template ["+t.getTemplateName()+"], because above key was found",DEBUG),this},Chunk.prototype.block=function(e,t,n){var r=n.block;return e&&(r=e),r?r(this,t):this},Chunk.prototype.partial=function(e,t,n){var r;r=dust.makeBase(t.global),r.blocks=t.blocks,t.stack&&t.stack.tail&&(r.stack=t.stack.tail),n&&(r=r.push(n)),typeof e=="string"&&(r.templateName=e),r=r.push(t.stack.head);var i;return typeof e=="function"?i=this.capture(e,r,function(e,t){r.templateName=r.templateName||e,dust.load(e,t,r).end()}):i=dust.load(e,this,r),i},Chunk.prototype.helper=function(e,t,n,r){var i=this;try{return dust.helpers[e]?dust.helpers[e](i,t,n,r):dust.onError(new Error("Invalid helper ["+e+"]"),i)}catch(s){return dust.onError(s,i)}},Chunk.prototype.capture=function(e,t,n){return this.map(function(r){var i=new Stub(function(e,t){e?r.setError(e):n(t,r)});e(i.head,t).end()})},Chunk.prototype.setError=function(e){return this.error=e,this.root.flush(),this},Tap.prototype.push=function(e){return new Tap(e,this)},Tap.prototype.go=function(e){var t=this;while(t)e=t.head(e),t=t.tail;return e};var HCHARS=new RegExp(/[&<>\"\']/),AMP=/&/g,LT=/</g,GT=/>/g,QUOT=/\"/g,SQUOT=/\'/g;dust.escapeHtml=function(e){return typeof e=="string"?HCHARS.test(e)?e.replace(AMP,"&").replace(LT,"<").replace(GT,">").replace(QUOT,""").replace(SQUOT,"'"):e:e};var BS=/\\/g,FS=/\//g,CR=/\r/g,LS=/\u2028/g,PS=/\u2029/g,NL=/\n/g,LF=/\f/g,SQ=/'/g,DQ=/"/g,TB=/\t/g;dust.escapeJs=function(e){return typeof e=="string"?e.replace(BS,"\\\\").replace(FS,"\\/").replace(DQ,'\\"').replace(SQ,"\\'").replace(CR,"\\r").replace(LS,"\\u2028").replace(PS,"\\u2029").replace(NL,"\\n").replace(LF,"\\f").replace(TB,"\\t"):e}})(dust),typeof 
exports!="undefined"&&(typeof process!="undefined"&&require("./server")(dust),module.exports=dust)
\ No newline at end of file