azkaban-uncached

HDFS browser dropped in

10/11/2012 10:41:09 PM

diff --git a/lib/avro-1.4.1.jar b/lib/avro-1.4.1.jar
new file mode 100644
index 0000000..543b3ca
Binary files /dev/null and b/lib/avro-1.4.1.jar differ
diff --git a/lib/commons-configuration-1.8.jar b/lib/commons-configuration-1.8.jar
new file mode 100644
index 0000000..ae9ae99
Binary files /dev/null and b/lib/commons-configuration-1.8.jar differ
diff --git a/lib/voldemort-0.96.jar b/lib/voldemort-0.96.jar
new file mode 100644
index 0000000..3d46b3a
Binary files /dev/null and b/lib/voldemort-0.96.jar differ
diff --git a/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java b/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
index 0ee1126..a86970d 100644
--- a/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
+++ b/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
@@ -16,7 +16,7 @@
 package azkaban.jobExecutor;
 
 import azkaban.utils.Props;
-import azkaban.jobExecutor.utils.SecurityUtils;
+import azkaban.utils.SecurityUtils;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.log4j.ConsoleAppender;
diff --git a/src/java/azkaban/jobExecutor/PigProcessJob.java b/src/java/azkaban/jobExecutor/PigProcessJob.java
index b0d8ae8..190f674 100644
--- a/src/java/azkaban/jobExecutor/PigProcessJob.java
+++ b/src/java/azkaban/jobExecutor/PigProcessJob.java
@@ -28,11 +28,11 @@ import java.util.StringTokenizer;
 
 import org.apache.log4j.Logger;
 
-import static azkaban.jobExecutor.utils.SecurityUtils.PROXY_KEYTAB_LOCATION;
-import static azkaban.jobExecutor.utils.SecurityUtils.PROXY_USER;
-import static azkaban.jobExecutor.utils.SecurityUtils.TO_PROXY;
-import static azkaban.jobExecutor.utils.SecurityUtils.shouldProxy;
 import static azkaban.jobExecutor.SecurePigWrapper.OBTAIN_BINARY_TOKEN;
+import static azkaban.utils.SecurityUtils.PROXY_KEYTAB_LOCATION;
+import static azkaban.utils.SecurityUtils.PROXY_USER;
+import static azkaban.utils.SecurityUtils.TO_PROXY;
+import static azkaban.utils.SecurityUtils.shouldProxy;
 
 public class PigProcessJob extends JavaProcessJob {
     
diff --git a/src/java/azkaban/jobExecutor/SecurePigWrapper.java b/src/java/azkaban/jobExecutor/SecurePigWrapper.java
index 0103ddb..e0f1106 100644
--- a/src/java/azkaban/jobExecutor/SecurePigWrapper.java
+++ b/src/java/azkaban/jobExecutor/SecurePigWrapper.java
@@ -31,7 +31,7 @@ import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.Properties;
 
-import static azkaban.jobExecutor.utils.SecurityUtils.getProxiedUser;
+import static azkaban.utils.SecurityUtils.getProxiedUser;
 
 public class SecurePigWrapper {
 
diff --git a/src/java/azkaban/utils/WebUtils.java b/src/java/azkaban/utils/WebUtils.java
index ad1b734..4c063bf 100644
--- a/src/java/azkaban/utils/WebUtils.java
+++ b/src/java/azkaban/utils/WebUtils.java
@@ -1,5 +1,7 @@
 package azkaban.utils;
 
+import java.text.NumberFormat;
+
 import org.joda.time.DateTime;
 import org.joda.time.DurationFieldType;
 import org.joda.time.ReadablePeriod;
@@ -9,6 +11,11 @@ import azkaban.executor.ExecutableFlow.Status;
 
 public class WebUtils {
 	public static final String DATE_TIME_STRING = "YYYY-MM-dd HH:mm:ss";
+
+	private static final long ONE_KB = 1024;
+	private static final long ONE_MB = 1024 * ONE_KB;
+	private static final long ONE_GB = 1024 * ONE_MB;
+	private static final long ONE_TB = 1024 * ONE_GB;
 	
 	public String formatDate(long timeMS) {
 		if (timeMS == -1) {
@@ -82,6 +89,10 @@ public class WebUtils {
 		return DateTimeFormat.forPattern(DATE_TIME_STRING).print(dt);
 	}
 	
+	public String formatDateTime(long timestamp) {
+		return formatDateTime(new DateTime(timestamp));
+	}
+	
 	public String formatPeriod(ReadablePeriod period)
 	{
         String periodStr = "n";
@@ -124,4 +135,21 @@ public class WebUtils {
 		
 		return execId.substring(0, index2);
 	}
+
+	public String displayBytes(long sizeBytes) {
+		NumberFormat nf = NumberFormat.getInstance();
+		nf.setMaximumFractionDigits(2);
+		if (sizeBytes >= ONE_TB)
+			return nf.format(sizeBytes / (double) ONE_TB) + " TB";
+		else if (sizeBytes >= ONE_GB)
+			return nf.format(sizeBytes / (double) ONE_GB) + " GB";
+		else if (sizeBytes >= ONE_MB)
+			return nf.format(sizeBytes / (double) ONE_MB) + " MB";
+		else if (sizeBytes >= ONE_KB)
+			return nf.format(sizeBytes / (double) ONE_KB) + " KB";
+		else
+			return sizeBytes + " B";
+	}
 }
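
Note: the new displayBytes helper walks down from the largest unit that fits and formats with at most two fraction digits. A quick sanity sketch of what it returns (inputs and expected outputs are illustrative, derived from the threshold logic above, not from this commit):

    WebUtils utils = new WebUtils();
    utils.displayBytes(512L);                     // "512 B"
    utils.displayBytes(1536L);                    // "1.5 KB"
    utils.displayBytes(5L * 1024 * 1024);         // "5 MB"
    utils.displayBytes(3L * 1024 * 1024 * 1024);  // "3 GB"
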
diff --git a/src/java/azkaban/webapp/AzkabanWebServer.java b/src/java/azkaban/webapp/AzkabanWebServer.java
index 994546f..2871ef2 100644
--- a/src/java/azkaban/webapp/AzkabanWebServer.java
+++ b/src/java/azkaban/webapp/AzkabanWebServer.java
@@ -20,6 +20,9 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
 import java.util.Arrays;
 import java.util.TimeZone;
 
@@ -47,6 +50,7 @@ import azkaban.utils.Utils;
 import azkaban.webapp.servlet.AzkabanServletContextListener;
 
 import azkaban.webapp.servlet.ExecutorServlet;
+import azkaban.webapp.servlet.HdfsBrowserServlet;
 import azkaban.webapp.servlet.ScheduleServlet;
 import azkaban.webapp.servlet.HistoryServlet;
 import azkaban.webapp.servlet.IndexServlet;
@@ -99,8 +103,9 @@ public class AzkabanWebServer {
 	private UserManager userManager;
 	private ProjectManager projectManager;
 	private ExecutorManager executorManager;
-	
 	private ScheduleManager scheduleManager;
+
+	private final ClassLoader _baseClassLoader;
 	
 	private Props props;
 	private SessionCache sessionCache;
@@ -125,7 +130,7 @@ public class AzkabanWebServer {
 		projectManager = loadProjectManager(props);
 		executorManager = loadExecutorManager(props);
 		scheduleManager = loadScheduleManager(executorManager, props);
-
+		_baseClassLoader = getBaseClassloader();
 		tempDir = new File(props.getString("azkaban.temp.dir", "temp"));
 
 		// Setup time zone
@@ -272,7 +277,34 @@ public class AzkabanWebServer {
 		engine.setProperty("parser.pool.size", 3);
 		return engine;
 	}
-
+	
+	private ClassLoader getBaseClassloader() throws MalformedURLException {
+		final ClassLoader retVal;
+
+		String hadoopHome = System.getenv("HADOOP_HOME");
+		String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+
+		if (hadoopConfDir != null) {
+			logger.info("Using hadoop config found in " + hadoopConfDir);
+			retVal = new URLClassLoader(new URL[] { new File(hadoopConfDir).toURI().toURL() },
+					getClass().getClassLoader());
+		} else if (hadoopHome != null) {
+			logger.info("Using hadoop config found in " + hadoopHome);
+			retVal = new URLClassLoader(new URL[] { new File(hadoopHome, "conf").toURI().toURL() },
+					getClass().getClassLoader());
+		} else {
+			logger.info("Neither HADOOP_CONF_DIR nor HADOOP_HOME is set; using default hadoop config.");
+			retVal = getClass().getClassLoader();
+		}
+
+		return retVal;
+	}
+
+	public ClassLoader getClassLoader() {
+		return _baseClassLoader;
+	}
+
 	/**
 	 * Returns the global azkaban properties
 	 * 
@@ -367,6 +399,7 @@ public class AzkabanWebServer {
 		root.addServlet(new ServletHolder(new ExecutorServlet()),"/executor");
 		root.addServlet(new ServletHolder(new HistoryServlet()), "/history");
 		root.addServlet(new ServletHolder(new ScheduleServlet()),"/schedule");
+		root.addServlet(new ServletHolder(new HdfsBrowserServlet()), "/hdfs/*");
 		
 		root.setAttribute(AzkabanServletContextListener.AZKABAN_SERVLET_CONTEXT_KEY, app);
 
diff --git a/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java b/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
index ac000b5..2b8102b 100644
--- a/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
+++ b/src/java/azkaban/webapp/servlet/AbstractAzkabanServlet.java
@@ -195,6 +195,7 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 	 */
 	protected void setErrorMessageInCookie(HttpServletResponse response, String errorMsg) {
 		Cookie cookie = new Cookie(AZKABAN_FAILURE_MESSAGE, errorMsg);
+		cookie.setPath("/");
 		response.addCookie(cookie);
 	}
 
@@ -207,6 +208,7 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 	 */
 	protected void setSuccessMessageInCookie(HttpServletResponse response, String message) {
 		Cookie cookie = new Cookie(AZKABAN_SUCCESS_MESSAGE, message);
+		cookie.setPath("/");
 		response.addCookie(cookie);
 	}
 
@@ -251,12 +253,13 @@ public abstract class AbstractAzkabanServlet extends HttpServlet {
 		Cookie[] cookies = request.getCookies();
 		if (cookies != null) {
 			for (Cookie cookie : cookies) {
 				if (name.equals(cookie.getName())) {
 					return cookie;
 				}
 			}
 		}
-
+		
 		return null;
 	}
 
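
Note: the setPath("/") additions fix cookie scoping. Without an explicit path, the servlet container scopes a cookie to the path of the request that set it, so a message or session cookie written while serving one page would never be sent back on /hdfs/* requests. A minimal sketch, with the cookie name illustrative:

    Cookie cookie = new Cookie("azkaban.failure.message", msg);
    cookie.setPath("/");      // visible on every path of this server
    response.addCookie(cookie);

The session cookie in LoginAbstractAzkabanServlet gets the same treatment further down.
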
diff --git a/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java b/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java
new file mode 100644
index 0000000..fed4de3
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java
@@ -0,0 +1,294 @@
+/*
+ * Copyright 2010 LinkedIn, Inc
+ * 
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package azkaban.webapp.servlet;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.security.PrivilegedAction;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Properties;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+
+import azkaban.fsviewers.HdfsAvroFileViewer;
+import azkaban.fsviewers.HdfsFileViewer;
+import azkaban.fsviewers.JsonSequenceFileViewer;
+import azkaban.fsviewers.TextFileViewer;
+import azkaban.utils.SecurityUtils;
+import azkaban.webapp.session.Session;
+
+/**
+ * A servlet that shows the filesystem contents
+ * 
+ * @author jkreps
+ * 
+ */
+
+public class HdfsBrowserServlet extends LoginAbstractAzkabanServlet {
+    private static final long serialVersionUID = 1L;
+
+    private static Logger logger = Logger.getLogger(HdfsBrowserServlet.class);
+
+    private ArrayList<HdfsFileViewer> _viewers = new ArrayList<HdfsFileViewer>();
+
+    // Default viewer will be a text viewer
+    private HdfsFileViewer _defaultViewer = new TextFileViewer();
+
+    private Configuration conf;
+
+    private Properties property;
+
+    @Override
+    public void init(ServletConfig config) throws ServletException {
+        super.init(config);
+
+        _viewers.add(new HdfsAvroFileViewer());
+        _viewers.add(new JsonSequenceFileViewer());
+
+        property = this.getApplication().getAzkabanProps().toProperties();
+
+        conf = new Configuration();
+        conf.setClassLoader(this.getApplication().getClassLoader());
+
+        logger.info("HDFS Browser init");
+        logger.info("hadoop.security.authentication set to " + conf.get("hadoop.security.authentication"));
+        logger.info("hadoop.security.authorization set to " + conf.get("hadoop.security.authorization"));
+        logger.info("DFS name " + conf.get("fs.default.name"));
+    }
+
+    @Override
+    protected void handleGet(HttpServletRequest req, HttpServletResponse resp, Session session) throws ServletException, IOException {
+        String user = session.getUser().getUserId();
+        UserGroupInformation ugi = null;
+        try {
+            ugi = SecurityUtils.getProxiedUser(user, this.property, logger, conf);
+            FileSystem fs = ugi.doAs(new PrivilegedAction<FileSystem>() {
+                @Override
+                public FileSystem run() {
+                    try {
+                        return FileSystem.get(conf);
+                    } catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            });
+
+            try {
+                handleFSDisplay(fs, user, req, resp, session);
+            } finally {
+                fs.close();
+            }
+        } catch (Exception e) {
+            Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/hdfsbrowserpage.vm");
+            page.add("error_message", e.getMessage());
+            page.add("no_fs", "true");
+            page.render();
+        }
+    }
+
+    private void handleFSDisplay(FileSystem fs, String user, HttpServletRequest req, HttpServletResponse resp, Session session) throws IOException {
+        String prefix = req.getContextPath() + req.getServletPath();
+        String fsPath = req.getRequestURI().substring(prefix.length());
+        if(fsPath.length() == 0)
+            fsPath = "/";
+
+        if(logger.isDebugEnabled())
+            logger.debug("path=" + fsPath);
+
+        Path path = new Path(fsPath);
+        if(!fs.exists(path)) {
+            throw new IllegalArgumentException(path.toUri().getPath() + " does not exist.");
+        }
+        else if(fs.isFile(path)) {
+            displayFile(fs, req, resp, session, path);
+        }
+        else if(fs.getFileStatus(path).isDir()) {
+            displayDir(fs, user, req, resp, session, path);
+        } else {
+            throw new IllegalStateException("It exists, it is not a file, and it is not a directory, what is it precious?");
+        }
+    }
+
+    private void displayDir(FileSystem fs, String user, HttpServletRequest req, HttpServletResponse resp, Session session, Path path)
+            throws IOException {
+
+        Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/hdfsbrowserpage.vm");
+
+        List<Path> paths = new ArrayList<Path>();
+        List<String> segments = new ArrayList<String>();
+        Path curr = path;
+        while(curr.getParent() != null) {
+            paths.add(curr);
+            segments.add(curr.getName());
+            curr = curr.getParent();
+        }
+
+        Collections.reverse(paths);
+        Collections.reverse(segments);
+
+        page.add("paths", paths);
+        page.add("segments", segments);
+
+        try {
+            page.add("subdirs", fs.listStatus(path)); // may throw AccessControlException
+        } catch (AccessControlException e) {
+            page.add("error_message", "Permission denied. User cannot read the file or directory.");
+        } catch (IOException e) {
+            page.add("error_message", e.getMessage());
+        }
+        page.render();
+
+    }
+
+    private void displayFile(FileSystem fs, HttpServletRequest req, HttpServletResponse resp, Session session, Path path)
+            throws IOException {
+        int startLine = getIntParam(req, "start_line", 1);
+        int endLine = getIntParam(req, "end_line", 1000);
+
+        // use registered viewers to show the file content
+        boolean outputted = false;
+        OutputStream output = resp.getOutputStream();
+        for (HdfsFileViewer viewer : _viewers) {
+            if (viewer.canReadFile(fs, path)) {
+                viewer.displayFile(fs, path, output, startLine, endLine);
+                outputted = true;
+                break; // no need to try other viewers
+            }
+        }
+
+        // fall back to the default text viewer
+        if (!outputted) {
+            if (_defaultViewer.canReadFile(fs, path)) {
+                _defaultViewer.displayFile(fs, path, output, startLine, endLine);
+            } else {
+                output.write("Sorry, no viewer available for this file.".getBytes("UTF-8"));
+            }
+        }
+    }
+
+    @Override
+    protected void handlePost(HttpServletRequest req, HttpServletResponse resp,
+            Session session) throws ServletException, IOException {
+        // POST is not supported; the HDFS browser is read-only and GET-driven.
+    }
+}
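
Note: the core of this servlet is the proxy-user pattern in handleGet: the FileSystem is opened inside UserGroupInformation.doAs, so every HDFS call is attributed to the logged-in user and HDFS enforces that user's permissions rather than those of the Azkaban process owner. Distilled to a minimal sketch (user name illustrative):

    UserGroupInformation ugi = SecurityUtils.getProxiedUser("joe", props, logger, conf);
    FileSystem fs = ugi.doAs(new PrivilegedAction<FileSystem>() {
        public FileSystem run() {
            try {
                return FileSystem.get(conf);   // opened as "joe"
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    });

The browse path itself comes straight off the URL: a request to /hdfs/user/joe/logs has the /hdfs prefix stripped in handleFSDisplay, leaving the HDFS path /user/joe/logs.
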
diff --git a/src/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java b/src/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
index 89486ec..1203e78 100644
--- a/src/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
+++ b/src/java/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
@@ -157,7 +157,9 @@ public abstract class LoginAbstractAzkabanServlet extends
 
 			String randomUID = UUID.randomUUID().toString();
 			Session session = new Session(randomUID, user);
-			resp.addCookie(new Cookie(SESSION_ID_NAME, randomUID));
+			Cookie cookie = new Cookie(SESSION_ID_NAME, randomUID);
+			cookie.setPath("/");
+			resp.addCookie(cookie);
 			getApplication().getSessionCache().addSession(session);
 			handleGet(req, resp, session);
 		} 
diff --git a/src/java/azkaban/webapp/servlet/velocity/hdfsbrowserpage.vm b/src/java/azkaban/webapp/servlet/velocity/hdfsbrowserpage.vm
new file mode 100644
index 0000000..18842d4
--- /dev/null
+++ b/src/java/azkaban/webapp/servlet/velocity/hdfsbrowserpage.vm
@@ -0,0 +1,93 @@
+<!DOCTYPE html> 
+<html>
+	<head>
+#parse( "azkaban/webapp/servlet/velocity/style.vm" )
+		<script type="text/javascript" src="${context}/js/jquery/jquery.js"></script>    
+		<script type="text/javascript" src="${context}/js/namespace.js"></script>
+		<script type="text/javascript" src="${context}/js/underscore-1.2.1-min.js"></script>
+		<script type="text/javascript" src="${context}/js/backbone-0.5.3-min.js"></script>
+		<script type="text/javascript" src="${context}/js/jquery.simplemodal.js"></script>
+		<script type="text/javascript" src="${context}/js/azkaban.nav.js"></script>
+		<script type="text/javascript">
+			var contextURL = "${context}";
+			var currentTime = ${currentTime};
+			var timezone = "${timezone}";
+			var errorMessage = null;
+			var successMessage = null;
+		</script>
+	</head>
+	<body>
+#set($current_page="hdfsbrowser")
+#parse( "azkaban/webapp/servlet/velocity/nav.vm" )
+		<div class="messaging"><p id="messageClose">X</p><p id="message"></p></div>  
+
+		<div class="content">
+		
+		#if($errorMsg)
+			<div class="box-error-message">$errorMsg</div>
+		#else
+			#if($error_message != "null")
+				<div class="box-error-message">$error_message</div>
+			#elseif($success_message != "null")
+				<div class="box-success-message">$success_message</div>
+			#end
+		#end
+		
+			<div id="all-hdfsbrowser-content">
+				<div class="section-hd">
+					<h2>HDFS Browser</h2>
+				</div>
+			</div>
+		#if(!$no_fs)
+			<div class="box">
+				<div class="breadcrumbs">
+					<a href="${context}/hdfs/">/</a>
+					#set($size = $paths.size() - 1)
+					#if($size >= 0)
+						#foreach($i in [0..$size])
+							<a href="$context/hdfs${paths.get($i)}">${segments.get($i)}</a> /
+						#end
+					#end
+				</div>
+			
+				<div class="subdirs">
+					<table id="hdfs-dir" class="fileTable">
+					<thead>
+						<tr>
+							<th>File</th>
+							<th>Permission</th>
+							<th>Owner/Group</th>
+							<th>Size</th>
+							<th>Modified Date</th>
+						</tr>
+					</thead>
+					<tbody>
+						#if($subdirs)
+							#foreach($status in $subdirs)
+						<tr>
+							<td>
+								<a href="${context}/hdfs${status.getPath().toUri().getPath()}">${status.path.name}#if($status.isDir())/#end</a>
+							</td>
+							<td>${status.permission}</td>
+							<td>${status.owner}/${status.group}</td>
+							<td>
+								#if($status.isDir())
+									&ndash;
+								#else
+									$utils.displayBytes(${status.len})
+								#end
+							</td>
+							<td>$utils.formatDateTime(${status.modificationTime})</td>
+						</tr>
+							#end
+						#else
+						<tr><td>No Files In This Directory</td></tr>
+						#end
+					</tbody>
+				</table>
+			</div>
+		</div>
+		#end
+		</div>
+	</body>
+</html>
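
Note: the template calls $utils.displayBytes and $utils.formatDateTime, the two WebUtils helpers added above. That presumes the page context exposes a WebUtils instance under the name "utils", along the lines of (an assumption about newPage's plumbing, not shown in this diff):

    page.add("utils", new WebUtils());
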
diff --git a/src/java/azkaban/webapp/servlet/velocity/nav.vm b/src/java/azkaban/webapp/servlet/velocity/nav.vm
index 0045e1d..9df715b 100644
--- a/src/java/azkaban/webapp/servlet/velocity/nav.vm
+++ b/src/java/azkaban/webapp/servlet/velocity/nav.vm
@@ -11,7 +11,7 @@
 				<li id="scheduled-jobs-tab" #if($current_page == 'schedule')class="selected"#end onClick="navMenuClick('$!context/schedule')"><a href="$!context/schedule">Scheduled</a></li>
 				<li id="executing-jobs-tab" #if($current_page == 'executing')class="selected"#end onClick="navMenuClick('$!context/executor')"><a href="$!context/executor">Executing</a></li>
 				<li id="history-jobs-tab" #if($current_page == 'history')class="selected"#end onClick="navMenuClick('$!context/history')"><a href="$!context/history">History</a></li>
-				<li><a href="$!context/fs">HDFS</a></li>
+				<li id="hdfs-browser-tab" #if($current_page == 'hdfsbrowser')class="selected"#end onClick="navMenuClick('$!context/hdfs')"><a href="$!context/hdfs">HDFS</a></li>
 			</ul>