You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by ao...@apache.org on 2017/11/27 13:00:03 UTC
[08/16] ambari git commit: AMBARI-21569. Users randomly getting
"HDFS020 Could not write file" exceptions while running query from Hive
View (Venkata Sairam)
AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c57e243d
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c57e243d
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c57e243d
Branch: refs/heads/branch-feature-AMBARI-22457
Commit: c57e243d2d0c2f480b56693a39e97bb81e258da6
Parents: 099e018
Author: Venkata Sairam <ve...@gmail.com>
Authored: Thu Nov 23 15:52:55 2017 +0530
Committer: Venkata Sairam <ve...@gmail.com>
Committed: Thu Nov 23 15:52:55 2017 +0530
----------------------------------------------------------------------
.../org/apache/ambari/view/utils/hdfs/HdfsApi.java | 16 +++++++++++++++-
.../apache/ambari/view/utils/hdfs/HdfsUtil.java | 17 ++++++++++++-----
2 files changed, 27 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 3db2081..812cd54 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -485,7 +485,20 @@ public class HdfsApi {
* @throws IOException
* @throws InterruptedException
*/
- public <T> T execute(PrivilegedExceptionAction<T> action)
+ public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
+ return this.execute(action, false);
+ }
+
+
+ /**
+ * Executes action on HDFS using doAs
+ * @param action strategy object
+ * @param <T> result type
+ * @return result of operation
+ * @throws IOException
+ * @throws InterruptedException
+ */
+ public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
throws IOException, InterruptedException {
T result = null;
@@ -508,6 +521,7 @@ public class HdfsApi {
}
LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
"Retrying... Try #" + (tryNumber + 1));
+ LOG.error("Retrying: " + ex.getMessage(),ex);
Thread.sleep(1000); //retry after 1 second
}
} while (!succeeded);
http://git-wip-us.apache.org/repos/asf/ambari/blob/c57e243d/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 0670f1a..810129b 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -27,6 +27,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
import java.util.Map;
public class HdfsUtil {
@@ -38,13 +39,19 @@ public class HdfsUtil {
* @param filePath path to file
* @param content new content of file
*/
- public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
- FSDataOutputStream stream;
+ public static void putStringToFile(final HdfsApi hdfs,final String filePath, final String content) throws HdfsApiException {
+
try {
synchronized (hdfs) {
- stream = hdfs.create(filePath, true);
- stream.write(content.getBytes());
- stream.close();
+ hdfs.execute(new PrivilegedExceptionAction<Void>() {
+ @Override
+ public Void run() throws Exception {
+ final FSDataOutputStream stream = hdfs.create(filePath, true);
+ stream.write(content.getBytes());
+ stream.close();
+ return null;
+ }
+ }, true);
}
} catch (IOException e) {
throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);