hive-commits mailing list archives

From sze...@apache.org
Subject svn commit: r1672733 - /hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
Date Fri, 10 Apr 2015 19:34:19 GMT
Author: szehon
Date: Fri Apr 10 19:34:19 2015
New Revision: 1672733

URL: http://svn.apache.org/r1672733
Log:
HIVE-10291 : Hive on Spark job configuration needs to be logged [Spark Branch] (Szehon, reviewed by Chengxiang and Xuefu)

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java?rev=1672733&r1=1672732&r2=1672733&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java Fri Apr 10 19:34:19 2015
@@ -22,6 +22,7 @@ import com.google.common.base.Strings;
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.io.StringWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -33,6 +34,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -231,6 +233,7 @@ public class RemoteHiveSparkClient imple
 
       Path localScratchDir = KryoSerializer.deserialize(scratchDirBytes, Path.class);
       SparkWork localSparkWork = KryoSerializer.deserialize(sparkWorkBytes, SparkWork.class);
+      logConfigurations(localJobConf);
 
       SparkCounters sparkCounters = new SparkCounters(jc.sc());
       Map<String, List<String>> prefixes = localSparkWork.getRequiredCounterPrefix();
@@ -255,6 +258,18 @@ public class RemoteHiveSparkClient imple
       jc.monitor(future, sparkCounters, plan.getCachedRDDIds());
       return null;
     }
-  }
 
+    private void logConfigurations(JobConf localJobConf) {
+      if (LOG.isInfoEnabled()) {
+        LOG.info("Logging job configuration: ");
+        StringWriter outWriter = new StringWriter();
+        try {
+          Configuration.dumpConfiguration(localJobConf, outWriter);
+        } catch (IOException e) {
+          LOG.warn("Error logging job configuration", e);
+        }
+        LOG.info(outWriter.toString());
+      }
+    }
+  }
 }
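
For reference, the new logConfigurations method relies on Hadoop's static Configuration.dumpConfiguration(Configuration, Writer), which writes every property of the given configuration (in JSON form) to the supplied Writer; the method then emits that text through the logger in a single INFO message. Below is a minimal standalone sketch of the same call. The class name and the sample property are hypothetical and only illustrate usage; they are not part of this commit.

import java.io.IOException;
import java.io.StringWriter;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;

// Hypothetical example class, not part of the Hive source tree.
public class ConfDumpExample {
  private static final Log LOG = LogFactory.getLog(ConfDumpExample.class);

  public static void main(String[] args) throws IOException {
    JobConf jobConf = new JobConf();
    jobConf.set("hive.execution.engine", "spark");  // sample property, for illustration only

    // Dump every property of the configuration into a writer, then log it
    // in one message, mirroring RemoteHiveSparkClient.logConfigurations().
    if (LOG.isInfoEnabled()) {
      StringWriter outWriter = new StringWriter();
      Configuration.dumpConfiguration(jobConf, outWriter);
      LOG.info("Logging job configuration: " + outWriter.toString());
    }
  }
}

Guarding the dump with LOG.isInfoEnabled(), as the committed code does, avoids serializing the full configuration when INFO logging is disabled.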


