ambari-commits mailing list archives

From vsai...@apache.org
Subject ambari git commit: AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)
Date Thu, 23 Nov 2017 10:21:48 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.5 603c3fd74 -> adc1fed15


AMBARI-21569. Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View (Venkata Sairam)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/adc1fed1
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/adc1fed1
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/adc1fed1

Branch: refs/heads/branch-2.5
Commit: adc1fed15f57b427ee6fbaeb4b747004492f49e4
Parents: 603c3fd
Author: Venkata Sairam <venkatasairam.lanka@gmail.com>
Authored: Thu Nov 23 15:50:22 2017 +0530
Committer: Venkata Sairam <venkatasairam.lanka@gmail.com>
Committed: Thu Nov 23 15:50:22 2017 +0530

----------------------------------------------------------------------
 .../apache/ambari/view/utils/hdfs/HdfsApi.java    | 18 ++++++++++++++++--
 .../apache/ambari/view/utils/hdfs/HdfsUtil.java   | 17 ++++++++++++-----
 2 files changed, 28 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 90fa483..5bce7ba 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -386,7 +386,20 @@ public class HdfsApi {
    * @throws IOException
    * @throws InterruptedException
    */
-  public <T> T execute(PrivilegedExceptionAction<T> action)
+  public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
+    return this.execute(action, false);
+  }
+
+
+  /**
+   * Executes action on HDFS using doAs
+   * @param action strategy object
+   * @param <T> result type
+   * @return result of operation
+   * @throws IOException
+   * @throws InterruptedException
+   */
+  public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
       throws IOException, InterruptedException {
     T result = null;
 
@@ -401,7 +414,7 @@ public class HdfsApi {
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
           throw ex;
         }
         if (tryNumber >= 3) {
@@ -409,6 +422,7 @@ public class HdfsApi {
         }
         LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
             "Retrying... Try #" + (tryNumber + 1));
+        LOG.error("Retrying: " + ex.getMessage(), ex);
         Thread.sleep(1000);  //retry after 1 second
       }
     } while (!succeeded);
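
A note on the hunk above: the new boolean parameter lets callers opt into retrying
every IOException (up to three attempts, one second apart), rather than only the
"Cannot obtain block length for" case the original loop special-cased. Below is a
minimal, hypothetical sketch of a caller using the new overload; HdfsApi,
execute(action, alwaysRetry), and create(path, overwrite) are the signatures shown
in this diff, while the class name, path, and content are illustrative assumptions
only, not part of this commit.

    import java.security.PrivilegedExceptionAction;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.ambari.view.utils.hdfs.HdfsApi;

    public class RetryingWriteSketch {
      public static void write(final HdfsApi api) throws Exception {
        api.execute(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            // create() and the write run as the doAs user inside the retry loop
            FSDataOutputStream stream = api.create("/tmp/example.txt", true);
            stream.write("example content".getBytes());
            stream.close();
            return null;
          }
        }, true);  // alwaysRetry = true: retry any IOException, up to 3 tries
      }
    }
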

http://git-wip-us.apache.org/repos/asf/ambari/blob/adc1fed1/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 0670f1a..810129b 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -27,6 +27,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
 import java.util.Map;
 
 public class HdfsUtil {
@@ -38,13 +39,19 @@ public class HdfsUtil {
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
-    FSDataOutputStream stream;
+  public static void putStringToFile(final HdfsApi hdfs, final String filePath, final String content) throws HdfsApiException {
+
     try {
       synchronized (hdfs) {
-        stream = hdfs.create(filePath, true);
-        stream.write(content.getBytes());
-        stream.close();
+        hdfs.execute(new PrivilegedExceptionAction<Void>() {
+          @Override
+          public Void run() throws Exception {
+            final FSDataOutputStream stream = hdfs.create(filePath, true);
+            stream.write(content.getBytes());
+            stream.close();
+            return null;
+          }
+        }, true);
       }
     } catch (IOException e) {
       throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
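
The effect of this second hunk: putStringToFile() now funnels its create/write/close
sequence through HdfsApi.execute(..., true), so the whole write runs under the
proxied doAs user and transient IOExceptions are retried before surfacing as the
"HDFS020 Could not write file" error. A hedged usage sketch, assuming an
already-initialized HdfsApi instance named api; the path and query text are
illustrative assumptions, not values from this commit:

    static void saveQuery(HdfsApi api) throws HdfsApiException {
      // putStringToFile is the method patched above; the write below is now
      // retried up to three times before HDFS020 is thrown to the caller.
      HdfsUtil.putStringToFile(api, "/user/admin/hive/jobs/job-1/query.hql",
          "SELECT * FROM example_table;");
    }
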

