Posted to commits@hive.apache.org by sp...@apache.org on 2016/05/27 15:37:24 UTC

[03/48] hive git commit: HIVE-13551: Make cleardanglingscratchdir work on Windows (Daniel Dai, reviewed by Thejas Nair)

HIVE-13551: Make cleardanglingscratchdir work on Windows (Daniel Dai, reviewed by Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/ae6ad6d1
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/ae6ad6d1
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/ae6ad6d1

Branch: refs/heads/java8
Commit: ae6ad6d1b7423d47080b7777cc43298af70c25ef
Parents: b93ce78
Author: Daniel Dai <da...@hortonworks.com>
Authored: Wed May 25 15:00:05 2016 -0700
Committer: Daniel Dai <da...@hortonworks.com>
Committed: Wed May 25 15:01:23 2016 -0700

----------------------------------------------------------------------
 bin/ext/cleardanglingscratchdir.cmd             |  1 -
 bin/hive.cmd                                    |  3 ++
 .../ql/session/TestClearDanglingScratchDir.java |  5 +++
 .../ql/session/ClearDanglingScratchDir.java     | 46 +++++++++++++++-----
 4 files changed, 44 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/ae6ad6d1/bin/ext/cleardanglingscratchdir.cmd
----------------------------------------------------------------------
diff --git a/bin/ext/cleardanglingscratchdir.cmd b/bin/ext/cleardanglingscratchdir.cmd
index 31104af..1bb0453 100644
--- a/bin/ext/cleardanglingscratchdir.cmd
+++ b/bin/ext/cleardanglingscratchdir.cmd
@@ -16,7 +16,6 @@
 
 set CLASS=org.apache.hadoop.hive.ql.session.ClearDanglingScratchDir
 set HIVE_OPTS=
-set HADOOP_CLASSPATH=
 
 pushd %HIVE_LIB%
 for /f %%a IN ('dir /b hive-exec-*.jar') do (

http://git-wip-us.apache.org/repos/asf/hive/blob/ae6ad6d1/bin/hive.cmd
----------------------------------------------------------------------
diff --git a/bin/hive.cmd b/bin/hive.cmd
index 9080796..79d6d1b 100644
--- a/bin/hive.cmd
+++ b/bin/hive.cmd
@@ -361,6 +361,9 @@ goto :EOF
 
 	set /a SERVICE_COUNT = %SERVICE_COUNT% + 1
 	set VAR%SERVICE_COUNT%=schematool
+
+        set /a SERVICE_COUNT = %SERVICE_COUNT% + 1
+        set VAR%SERVICE_COUNT%=cleardanglingscratchdir
 goto :EOF
 
 :AddToAuxParam

http://git-wip-us.apache.org/repos/asf/hive/blob/ae6ad6d1/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java
index 3cb80a7..185dbd5 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/session/TestClearDanglingScratchDir.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.WindowsPathUtil;
+import org.apache.hadoop.util.Shell;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -47,6 +49,9 @@ public class TestClearDanglingScratchDir {
   static public void oneTimeSetup() throws Exception {
     m_dfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).format(true).build();
     conf = new HiveConf();
+    if (Shell.WINDOWS) {
+      WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
+    }
     conf.set(HiveConf.ConfVars.HIVE_SCRATCH_DIR_LOCK.toString(), "true");
     conf.set(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true");
     LoggerFactory.getLogger("SessionState");
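
For reference, the new guard only applies the path conversion when the JVM is actually running on Windows; WindowsPathUtil.convertPathsFromWindowsToHdfs presumably rewrites Windows-style local (drive-letter) paths in the configuration into a form the MiniDFSCluster-backed test can use. A minimal sketch of the resulting setup, assembled from the hunk above (the class wrapper and @BeforeClass annotation are assumed from the existing test, not part of this patch):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.WindowsPathUtil;
import org.apache.hadoop.util.Shell;
import org.junit.BeforeClass;

public class TestClearDanglingScratchDirSetupSketch {
  private static MiniDFSCluster m_dfs;
  private static HiveConf conf;

  @BeforeClass
  public static void oneTimeSetup() throws Exception {
    // Single-node HDFS mini cluster that backs the scratch directories under test.
    m_dfs = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(1).format(true).build();
    conf = new HiveConf();
    if (Shell.WINDOWS) {
      // On Windows the default conf values are local drive-letter paths; convert
      // them to HDFS-style paths before the test starts creating sessions.
      WindowsPathUtil.convertPathsFromWindowsToHdfs(conf);
    }
    conf.set(HiveConf.ConfVars.HIVE_SCRATCH_DIR_LOCK.toString(), "true");
    conf.set(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.toString(), "true");
  }
}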

http://git-wip-us.apache.org/repos/asf/hive/blob/ae6ad6d1/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
index 8543768..ee012c2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/ClearDanglingScratchDir.java
@@ -25,6 +25,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.OptionBuilder;
 import org.apache.commons.cli.Options;
+import org.apache.hadoop.fs.FileAlreadyExistsException;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -66,6 +67,7 @@ public class ClearDanglingScratchDir {
 
     if (cli.hasOption("r")) {
       dryRun = true;
+      SessionState.getConsole().printInfo("dry-run mode on");
     }
 
     if (cli.hasOption("v")) {
@@ -99,24 +101,48 @@ public class ClearDanglingScratchDir {
           }
           continue;
         }
+        boolean removable = false;
+        boolean inuse = false;
         try {
           IOUtils.closeStream(fs.append(lockFilePath));
-          scratchDirToRemove.add(scratchDir.getPath());
-        } catch (RemoteException e) {
+          removable = true;
+        } catch (RemoteException eAppend) {
           // RemoteException with AlreadyBeingCreatedException will be thrown
           // if the file is currently held by a writer
-          if(AlreadyBeingCreatedException.class.getName().equals(e.getClassName())){
-            // Cannot open the lock file for writing, must be held by a live process
-            String message = scratchDir.getPath() + " is being used by live process";
-            if (verbose) {
-              SessionState.getConsole().printInfo(message);
-            } else {
-              SessionState.getConsole().logInfo(message);
+          if(AlreadyBeingCreatedException.class.getName().equals(eAppend.getClassName())){
+            inuse = true;
+          } else if (UnsupportedOperationException.class.getName().equals(eAppend.getClassName())) {
+            // Append is not supported in the cluster, try to use create
+            try {
+              IOUtils.closeStream(fs.create(lockFilePath, false));
+            } catch (RemoteException eCreate) {
+              if (AlreadyBeingCreatedException.class.getName().equals(eCreate.getClassName())){
+                // If the file is held by a writer, will throw AlreadyBeingCreatedException
+                inuse = true;
+              }  else {
+                SessionState.getConsole().printInfo("Unexpected error:" + eCreate.getMessage());
+              }
+            } catch (FileAlreadyExistsException eCreateNormal) {
+                // Otherwise, throw FileAlreadyExistsException, which means the file owner is
+                // dead
+                removable = true;
             }
           } else {
-            throw e;
+            SessionState.getConsole().printInfo("Unexpected error:" + eAppend.getMessage());
           }
         }
+        if (inuse) {
+          // Cannot open the lock file for writing, must be held by a live process
+          String message = scratchDir.getPath() + " is being used by live process";
+          if (verbose) {
+            SessionState.getConsole().printInfo(message);
+          } else {
+            SessionState.getConsole().logInfo(message);
+          }
+        }
+        if (removable) {
+          scratchDirToRemove.add(scratchDir.getPath());
+        }
       }
     }
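
The core of the fix is in the last hunk: on filesystems where append() is not supported (the case hit on Windows), the tool now falls back to create(lockFilePath, false) against the existing lock file and classifies the outcome instead of failing. A condensed sketch of that probe, extracted from the hunk above (the helper name isLockFileRemovable and the standalone class are mine, for illustration only; the real code keeps the removable/inuse flags inline in its scan loop and logs unexpected errors rather than rethrowing them):

import java.io.IOException;

import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RemoteException;

public class ScratchDirLockProbeSketch {

  // Returns true when no live process holds the scratch-dir lock file (the
  // directory is dangling and safe to remove), false when a writer still
  // holds it or the result is inconclusive.
  static boolean isLockFileRemovable(FileSystem fs, Path lockFilePath) throws IOException {
    try {
      // If the original writer died, its lease has expired and append succeeds.
      IOUtils.closeStream(fs.append(lockFilePath));
      return true;
    } catch (RemoteException eAppend) {
      if (AlreadyBeingCreatedException.class.getName().equals(eAppend.getClassName())) {
        return false; // lock file is held open by a live process
      }
      if (UnsupportedOperationException.class.getName().equals(eAppend.getClassName())) {
        // Append is not supported on this cluster; probe with a non-overwriting create.
        try {
          IOUtils.closeStream(fs.create(lockFilePath, false));
        } catch (RemoteException eCreate) {
          if (AlreadyBeingCreatedException.class.getName().equals(eCreate.getClassName())) {
            return false; // still held by a live writer
          }
          throw eCreate;
        } catch (FileAlreadyExistsException eCreateNormal) {
          return true; // file exists but nobody holds it, so the owner is dead
        }
        return false; // create unexpectedly succeeded; leave the directory alone
      }
      throw eAppend;
    }
  }
}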