azkaban-uncached
Changes
src/java/azkaban/utils/SecurityUtils.java 21(+15 -6)
Details
diff --git a/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java b/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
index ff24162..3c2acf1 100644
--- a/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
+++ b/src/java/azkaban/jobExecutor/JavaJobRunnerMain.java
@@ -33,6 +33,7 @@ import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
+import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.LinkedHashMap;
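
The only change visible for JavaJobRunnerMain in this hunk is the new import; presumably the runner now unwraps UndeclaredThrowableException raised from code executed under UserGroupInformation.doAs(...), where a checked exception that the proxied interface does not declare gets wrapped. A minimal sketch of that unwrapping pattern, purely for illustration (class and method names are hypothetical, not from the patch):

    import java.io.IOException;
    import java.lang.reflect.UndeclaredThrowableException;

    // Minimal sketch (not code from the patch): recover the real cause when a
    // reflective proxy or privileged block smuggles out a checked exception
    // inside an UndeclaredThrowableException.
    public final class UnwrapExample {
        public static void runUnwrapped(Runnable action) throws IOException {
            try {
                action.run();
            } catch (UndeclaredThrowableException e) {
                Throwable cause = e.getCause();
                if (cause instanceof IOException) {
                    throw (IOException) cause;   // surface the original checked exception
                }
                throw e;                         // anything else: rethrow as-is
            }
        }
    }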
src/java/azkaban/utils/SecurityUtils.java 21(+15 -6)
diff --git a/src/java/azkaban/utils/SecurityUtils.java b/src/java/azkaban/utils/SecurityUtils.java
index 42808b3..b6e7fe0 100644
--- a/src/java/azkaban/utils/SecurityUtils.java
+++ b/src/java/azkaban/utils/SecurityUtils.java
@@ -37,18 +37,25 @@ public class SecurityUtils {
* necessary from properties file.
*/
public static synchronized UserGroupInformation getProxiedUser(String toProxy, Properties prop, Logger log, Configuration conf) throws IOException {
- if(toProxy == null) {
- throw new IllegalArgumentException("toProxy can't be null");
- }
+
if(conf == null) {
throw new IllegalArgumentException("conf can't be null");
}
-
+ UserGroupInformation.setConfiguration(conf);
+ // don't do privileged actions in case Hadoop security is not enabled.
+ if(!UserGroupInformation.isSecurityEnabled()) {
+ // we don't get into whether to allow impersonation as an arbitrary user on an unsecured grid
+ return null;
+ }
+
+ if(toProxy == null) {
+ throw new IllegalArgumentException("toProxy can't be null");
+ }
+
if (loginUser == null) {
log.info("No login user. Creating login user");
String keytab = verifySecureProperty(prop, PROXY_KEYTAB_LOCATION, log);
String proxyUser = verifySecureProperty(prop, PROXY_USER, log);
- UserGroupInformation.setConfiguration(conf);
UserGroupInformation.loginUserFromKeytab(proxyUser, keytab);
loginUser = UserGroupInformation.getLoginUser();
log.info("Logged in with user " + loginUser);
@@ -66,7 +73,9 @@ public class SecurityUtils {
*/
public static UserGroupInformation getProxiedUser(Properties prop, Logger log, Configuration conf) throws IOException {
String toProxy = verifySecureProperty(prop, TO_PROXY, log);
- return getProxiedUser(toProxy, prop, log, conf);
+ UserGroupInformation user = getProxiedUser(toProxy, prop, log, conf);
+ if(user == null) throw new IOException("Proxying as an arbitrary user on an unsecured grid is not supported!");
+ return user;
}
public static String verifySecureProperty(Properties properties, String s, Logger l) throws IOException {
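
Taken together, the two hunks change the contract: the four-argument getProxiedUser reports an unsecured grid by returning null, while the Properties-only overload treats that case as an error and throws IOException. A caller sketch of that contract, assuming the log4j Logger that SecurityUtils appears to take (class and method names are hypothetical, not from the patch):

    import java.io.IOException;
    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.log4j.Logger;

    import azkaban.utils.SecurityUtils;

    // Caller sketch (not code from the patch): a null return means Hadoop
    // security is disabled and no impersonation will happen.
    public final class ProxyContractExample {
        public static UserGroupInformation tryProxy(String toProxy, Properties props,
                                                    Logger log, Configuration conf)
                throws IOException {
            UserGroupInformation ugi = SecurityUtils.getProxiedUser(toProxy, props, log, conf);
            if (ugi == null) {
                log.info("Hadoop security is disabled; continuing without impersonation");
            }
            return ugi; // callers that cannot work without impersonation should fail here
        }
    }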
diff --git a/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java b/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java
index fed4de3..0a82625 100644
--- a/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java
+++ b/src/java/azkaban/webapp/servlet/HdfsBrowserServlet.java
@@ -16,8 +16,12 @@
package azkaban.webapp.servlet;
+import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLClassLoader;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collections;
@@ -30,6 +34,7 @@ import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
+import javax.swing.filechooser.FileNameExtensionFilter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
@@ -91,8 +96,35 @@ public class HdfsBrowserServlet extends LoginAbstractAzkabanServlet {
_viewers.add(new JsonSequenceFileViewer());
property = this.getApplication().getAzkabanProps().toProperties();
-
conf = new Configuration();
+
+// String hadoopHome = System.getenv("HADOOP_HOME");
+// String hadoopConfDir = System.getenv("HADOOP_CONF_DIR");
+// if(hadoopHome == null && hadoopConfDir == null) {
+// logger.error("HADOOP_HOME not set!");
+// throw new ServletException("HADOOP_HOME or HADOOP_CONF_DIR not set for hdfs browser!");
+// }
+//
+// try {
+// if(hadoopConfDir != null) {
+// logger.info("Using hadoop config found in " + hadoopConfDir);
+// for(File file : new File(hadoopConfDir).listFiles()) {
+// if(file.isFile() && file.getName().endsWith(".xml"))
+// conf.addResource(file.toURI().toURL());
+// }
+// } else if(hadoopHome != null) {
+// logger.info("Using hadoop config found in " + hadoopHome);
+// for(File file : new File(hadoopHome, "conf").listFiles()) {
+// if(file.isFile() && file.getName().endsWith(".xml"))
+// conf.addResource(file.toURI().toURL());
+// }
+// }
+// }
+// catch (MalformedURLException e) {
+// throw new ServletException("HADOOP_HOME or HADOOP_CONF_DIR is not valid!");
+// }
+
+
conf.setClassLoader(this.getApplication().getClassLoader());
logger.info("HDFS Browser init");
@@ -110,17 +142,24 @@ public class HdfsBrowserServlet extends LoginAbstractAzkabanServlet {
UserGroupInformation ugi = null;
try {
ugi = SecurityUtils.getProxiedUser(user, this.property, logger, conf);
- FileSystem fs = ugi.doAs(new PrivilegedAction<FileSystem>(){
-
- @Override
- public FileSystem run() {
- try {
- return FileSystem.get(conf);
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }});
-
+
+ FileSystem fs;
+ if (ugi != null) {
+ fs = ugi.doAs(new PrivilegedAction<FileSystem>(){
+
+ @Override
+ public FileSystem run() {
+ try {
+ return FileSystem.get(conf);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }});
+ }
+ else {
+ fs = FileSystem.get(conf);
+ }
+
try {
handleFSDisplay(fs, user, req, resp, session);
} catch (IOException e) {
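
A side note on the new branch above: the PrivilegedAction variant has to tunnel the checked IOException out through a RuntimeException. A hedged alternative sketch using PrivilegedExceptionAction, which Hadoop's doAs() also accepts and which lets the IOException escape directly (class and method names are hypothetical, not part of the patch):

    import java.io.IOException;
    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.security.UserGroupInformation;

    // Sketch (not code from the patch): open a FileSystem as the proxied user on
    // a secured grid, or directly when getProxiedUser(...) returned null.
    public final class FsAccess {
        public static FileSystem getFs(UserGroupInformation ugi, final Configuration conf)
                throws IOException {
            if (ugi == null) {
                return FileSystem.get(conf); // unsecured grid, no impersonation
            }
            try {
                return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
                    @Override
                    public FileSystem run() throws IOException {
                        return FileSystem.get(conf);
                    }
                });
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new IOException("Interrupted while opening FileSystem as proxied user", e);
            }
        }
    }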