azkaban-aplcache

Security hole fix: Hadoop delegation tokens are world-readable

9/22/2016 5:57:13 PM

Details

diff --git a/azkaban-common/build.gradle b/azkaban-common/build.gradle
index 79faaf8..687e392 100644
--- a/azkaban-common/build.gradle
+++ b/azkaban-common/build.gradle
@@ -40,7 +40,6 @@ dependencies {
   compile('org.quartz-scheduler:quartz:2.2.1')
 
   testCompile(project(':azkaban-test').sourceSets.test.output)
-  testCompile('junit:junit:4.11')
   testCompile('org.hamcrest:hamcrest-all:1.3')
 }
 
diff --git a/azkaban-exec-server/build.gradle b/azkaban-exec-server/build.gradle
index 4642da6..2a77009 100644
--- a/azkaban-exec-server/build.gradle
+++ b/azkaban-exec-server/build.gradle
@@ -3,7 +3,6 @@ apply plugin: 'distribution'
 dependencies {
   compile(project(':azkaban-common'))
 
-  testCompile('junit:junit:4.11')
   testCompile('org.hamcrest:hamcrest-all:1.3')
   testCompile(project(':azkaban-common').sourceSets.test.output)
 }
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java
new file mode 100644
index 0000000..d0aea33
--- /dev/null
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/ExecuteAsUser.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2011 LinkedIn Corp.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package azkaban.security;
+
+import org.apache.log4j.Logger;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A wrapper over the execute-as-user binary executable. It provides a simple API for running
+ * commands as another user while abstracting away process management and command-line handling.
+ */
+public class ExecuteAsUser {
+  private final static Logger log = Logger.getLogger(ExecuteAsUser.class);
+  private final static String EXECUTE_AS_USER = "execute-as-user";
+
+  private final File binaryExecutable;
+
+  /**
+   * Constructs the wrapper and validates that the binary is executable.
+   *
+   * @param nativeLibDirectory Absolute path to the native library directory
+   */
+  public ExecuteAsUser(final String nativeLibDirectory) {
+    this.binaryExecutable = new File(nativeLibDirectory, EXECUTE_AS_USER);
+    validate();
+  }
+
+  private void validate() {
+    if (!binaryExecutable.canExecute()) {
+      throw new RuntimeException("Unable to execute execute-as-user binary. Invalid Path: "
+          + binaryExecutable.getAbsolutePath());
+    }
+  }
+
+  /**
+   * Executes a command on behalf of another user.
+   *
+   * @param user The proxy user to run the command as
+   * @param command The list containing the program and its arguments
+   * @return The exit code of the command, or 1 if the wait was interrupted
+   * @throws IOException If the process cannot be started
+   */
+  public int execute(final String user, final List<String> command) throws IOException {
+    log.info("Command: " + command);
+    Process process = new ProcessBuilder()
+        .command(constructExecuteAsCommand(user, command))
+        .inheritIO()
+        .start();
+
+    int exitCode;
+    try {
+      exitCode = process.waitFor();
+    } catch (InterruptedException e) {
+      log.error(e.getMessage(), e);
+      Thread.currentThread().interrupt(); // restore the interrupt status rather than swallow it
+      exitCode = 1;
+    }
+    return exitCode;
+  }
+
+  private List<String> constructExecuteAsCommand(String user, List<String> command) {
+    List<String> commandList = new ArrayList<>();
+    commandList.add(binaryExecutable.getAbsolutePath());
+    commandList.add(user);
+    commandList.addAll(command);
+    return commandList;
+  }
+}
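
For context, here is a minimal usage sketch (not part of this commit) of how a caller might drive this wrapper. The native-lib path, proxy user, and command below are hypothetical:

    import java.io.IOException;
    import java.util.Arrays;

    import azkaban.security.ExecuteAsUser;

    public class ExecuteAsUserExample {
      public static void main(String[] args) throws IOException {
        // Hypothetical path; in practice it comes from the azkaban.native.lib property.
        ExecuteAsUser executeAsUser = new ExecuteAsUser("/opt/azkaban/native-lib");

        // Runs "ls -l /tmp" as the proxy user "joe" and returns the command's exit code.
        int exitCode = executeAsUser.execute("joe", Arrays.asList("ls", "-l", "/tmp"));
        if (exitCode != 0) {
          throw new IOException("Command failed with exit code " + exitCode);
        }
      }
    }
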
diff --git a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
index 70c6e17..275e18e 100644
--- a/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
+++ b/azkaban-hadoop-security-plugin/src/main/java/azkaban/security/HadoopSecurityManager_H_2_0.java
@@ -16,10 +16,6 @@
 
 package azkaban.security;
 
-import azkaban.security.commons.HadoopSecurityManager;
-import azkaban.security.commons.HadoopSecurityManagerException;
-import azkaban.utils.Props;
-import azkaban.utils.UndefinedPropertyException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileSystem;
@@ -70,8 +66,30 @@ import java.util.List;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
+import azkaban.security.commons.HadoopSecurityManager;
+import azkaban.security.commons.HadoopSecurityManagerException;
+import azkaban.utils.Props;
+import azkaban.utils.UndefinedPropertyException;
+
 public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
+  /**
+   * TODO Remove duplicated constants from plugins.
+   *
+   * Azkaban plugins don't share a common submodule from which they can inherit code. Constants are
+   * therefore copied between plugins, and any change to a constant's value will break Azkaban. This
+   * needs to be fixed as part of a plugin infrastructure implementation.
+   */
+  public static final String NATIVE_LIB_FOLDER = "azkaban.native.lib";
+
+  /**
+   * TODO: This should be exposed as a configurable parameter.
+   *
+   * The assumption is that an "azkaban" group exists that has access to data created by the azkaban process. For
+   * example, this may include delegation tokens created for other users to run their jobs.
+   */
+  public static final String GROUP_NAME = "azkaban";
+
   private static final String FS_HDFS_IMPL_DISABLE_CACHE =
       "fs.hdfs.impl.disable.cache";
 
@@ -109,6 +127,11 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   private static final String AZKABAN_PRINCIPAL = "proxy.user";
   private static final String OBTAIN_JOBHISTORYSERVER_TOKEN =
       "obtain.jobhistoryserver.token";
+  public static final String CHOWN = "chown";
+  public static final String CHMOD = "chmod";
+
+  // The file permissions assigned to a delegation token file when it is fetched
+  public static final String TOKEN_FILE_PERMISSIONS = "460";
 
   private UserGroupInformation loginUser = null;
   private final static Logger logger = Logger
@@ -125,11 +148,12 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
   private static URLClassLoader ucl;
 
-  private final RecordFactory recordFactory = RecordFactoryProvider
-      .getRecordFactory(null);
+  private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+  private final ExecuteAsUser executeAsUser;
 
   private HadoopSecurityManager_H_2_0(Props props)
       throws HadoopSecurityManagerException, IOException {
+    executeAsUser = new ExecuteAsUser(props.getString(NATIVE_LIB_FOLDER));
 
     // for now, assume the same/compatible native library, the same/compatible
     // hadoop-core jar
@@ -339,11 +363,11 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
   public synchronized void prefetchToken(final File tokenFile,
       final String userToProxy, final Logger logger)
       throws HadoopSecurityManagerException {
-
     logger.info("Getting hadoop tokens for " + userToProxy);
 
+    final UserGroupInformation proxiedUser = getProxiedUser(userToProxy);
     try {
-      getProxiedUser(userToProxy).doAs(new PrivilegedExceptionAction<Void>() {
+      proxiedUser.doAs(new PrivilegedExceptionAction<Void>() {
         @Override
         public Void run() throws Exception {
           getToken(userToProxy);
@@ -387,36 +411,13 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
           jc.getCredentials().addToken(mrdt.getService(), mrdt);
           jc.getCredentials().addToken(fsToken.getService(), fsToken);
 
-          FileOutputStream fos = null;
-          DataOutputStream dos = null;
-          try {
-            fos = new FileOutputStream(tokenFile);
-            dos = new DataOutputStream(fos);
-            jc.getCredentials().writeTokenStorageToStream(dos);
-          } finally {
-            if (dos != null) {
-              try {
-                dos.close();
-              } catch (Throwable t) {
-                // best effort
-                logger
-                    .error(
-                        "encountered exception while closing DataOutputStream of the tokenFile",
-                        t);
-              }
-            }
-            if (fos != null) {
-              fos.close();
-            }
-          }
+          prepareTokenFile(userToProxy, jc.getCredentials(), tokenFile, logger);
           // stash them to cancel after use.
           logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
         }
       });
     } catch (Exception e) {
-      throw new HadoopSecurityManagerException("Failed to get hadoop tokens! "
-          + e.getMessage() + e.getCause());
-
+      throw new HadoopSecurityManagerException("Failed to get hadoop tokens! " + e.getMessage() + e.getCause());
     }
   }
 
@@ -768,28 +769,7 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
         }
       });
 
-      FileOutputStream fos = null;
-      DataOutputStream dos = null;
-      try {
-        fos = new FileOutputStream(tokenFile);
-        dos = new DataOutputStream(fos);
-        cred.writeTokenStorageToStream(dos);
-      } finally {
-        if (dos != null) {
-          try {
-            dos.close();
-          } catch (Throwable t) {
-            // best effort
-            logger
-                .error(
-                    "encountered exception while closing DataOutputStream of the tokenFile",
-                    t);
-          }
-        }
-        if (fos != null) {
-          fos.close();
-        }
-      }
+      prepareTokenFile(userToProxy, cred, tokenFile, logger);
       // stash them to cancel after use.
 
       logger.info("Tokens loaded in " + tokenFile.getAbsolutePath());
@@ -804,6 +784,80 @@ public class HadoopSecurityManager_H_2_0 extends HadoopSecurityManager {
 
   }
 
+  /**
+   * Prepares the token file: writes the credentials to it and assigns restrictive
+   * permissions to keep the file secure.
+   *
+   * @param user The user to be proxied
+   * @param credentials The credentials to be written to the file
+   * @param tokenFile The file to be written
+   * @param logger The logger to use
+   * @throws IOException If there are issues writing the token file or updating its permissions
+   */
+  private void prepareTokenFile(final String user,
+                                final Credentials credentials,
+                                final File tokenFile,
+                                final Logger logger) throws IOException {
+    writeCredentialsToFile(credentials, tokenFile, logger);
+    try {
+      assignPermissions(user, tokenFile, logger);
+    } catch (IOException e) {
+      // On any error managing the token file, delete it
+      tokenFile.delete();
+      throw e;
+    }
+  }
+
+  private void writeCredentialsToFile(Credentials credentials, File tokenFile, Logger logger) throws IOException {
+    FileOutputStream fos = null;
+    DataOutputStream dos = null;
+    try {
+      fos = new FileOutputStream(tokenFile);
+      dos = new DataOutputStream(fos);
+      credentials.writeTokenStorageToStream(dos);
+    } finally {
+      if (dos != null) {
+        try {
+          dos.close();
+        } catch (Throwable t) {
+          // best effort
+          logger.error("encountered exception while closing DataOutputStream of the tokenFile", t);
+        }
+      }
+      if (fos != null) {
+        fos.close();
+      }
+    }
+  }
+
+  /**
+   * Uses the execute-as-user binary so that only the proxy user and the azkaban group can read the token file.
+   *
+   * Step 1. Set the file permissions to 460: readable by the owner, readable/writable by the azkaban group.
+   * Step 2. Make the proxy user the owner of the file.
+   *
+   * @param user The user to be proxied
+   * @param tokenFile The token file whose permissions are reassigned
+   * @param logger The logger to use
+   */
+  private void assignPermissions(String user, File tokenFile, Logger logger) throws IOException {
+    final List<String> changePermissionsCommand = Arrays.asList(
+        CHMOD, TOKEN_FILE_PERMISSIONS, tokenFile.getAbsolutePath()
+    );
+    int result = executeAsUser.execute(System.getProperty("user.name"), changePermissionsCommand);
+    if (result != 0) {
+      throw new IOException("Unable to modify permissions. User: " + user);
+    }
+
+    final List<String> changeOwnershipCommand = Arrays.asList(
+        CHOWN, user + ":" + GROUP_NAME, tokenFile.getAbsolutePath()
+    );
+    result = executeAsUser.execute("root", changeOwnershipCommand);
+    if (result != 0) {
+      throw new IOException("Unable to set ownership. User: " + user);
+    }
+  }
+
   private Text getMRTokenRenewerInternal(JobConf jobConf) throws IOException {
     // Taken from Oozie
     //
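
To illustrate the effect of the fix, a hedged sketch (not part of this commit) that verifies a fetched token file is no longer world readable. With TOKEN_FILE_PERMISSIONS of 460, the "others" class should hold no permissions at all; the token file path below is hypothetical:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.nio.file.attribute.PosixFilePermission;
    import java.util.Set;

    public class TokenFilePermissionCheck {
      public static void main(String[] args) throws IOException {
        // Hypothetical location of a fetched delegation token file.
        Path tokenFile = Paths.get("/tmp/azkaban-token-file");

        // Read the POSIX permission set, e.g. [OWNER_READ, GROUP_READ, GROUP_WRITE] for mode 460.
        Set<PosixFilePermission> perms = Files.getPosixFilePermissions(tokenFile);

        if (perms.contains(PosixFilePermission.OTHERS_READ)) {
          throw new IllegalStateException("Token file is still world readable: " + tokenFile);
        }
        System.out.println("Token file permissions: " + perms);
      }
    }
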
diff --git a/azkaban-test/build.gradle b/azkaban-test/build.gradle
index 24aeb40..3d7eab2 100644
--- a/azkaban-test/build.gradle
+++ b/azkaban-test/build.gradle
@@ -1,9 +1,5 @@
 apply plugin: 'distribution'
 
-dependencies {
-  testCompile('junit:junit:4.11')
-}
-
 distributions {
   animal {
     baseName = 'test-animal'
diff --git a/azkaban-web-server/build.gradle b/azkaban-web-server/build.gradle
index 531d2d8..182fdd9 100644
--- a/azkaban-web-server/build.gradle
+++ b/azkaban-web-server/build.gradle
@@ -31,7 +31,6 @@ dependencies {
   compile('org.apache.velocity:velocity-tools:2.0')
 
   testCompile('commons-collections:commons-collections:3.2.2')
-  testCompile('junit:junit:4.11')
   testCompile('org.hamcrest:hamcrest-all:1.3')
   testCompile('org.mockito:mockito-all:1.10.19')
 
@@ -40,7 +39,6 @@ dependencies {
 
   // Needed by Velocity at runtime
   testRuntime('commons-collections:commons-collections:3.2.2')
-  testCompile('junit:junit:4.11')
   testCompile('org.hamcrest:hamcrest-all:1.3')
 }
 

diff --git a/build.gradle b/build.gradle
index 51af06f..23f539d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -44,6 +44,10 @@ subprojects {
         )
       }
     }
+
+    dependencies {
+      testCompile('junit:junit:4.12')
+    }
   }
 
   // Common distribution plugin settings for sub-modules