Subject: svn commit: r1672733 - /hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
Date: Fri, 10 Apr 2015 19:34:19 -0000
To: commits@hive.apache.org
From: szehon@apache.org
Reply-To: hive-dev@hive.apache.org
Message-Id: <20150410193420.0D728AC00B4@hades.apache.org>

Author: szehon
Date: Fri Apr 10 19:34:19 2015
New Revision: 1672733

URL: http://svn.apache.org/r1672733
Log:
HIVE-10291 : Hive on Spark job configuration needs to be logged [Spark Branch] (Szehon, reviewed by Chengxiang and Xuefu)

Modified:
    hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java?rev=1672733&r1=1672732&r2=1672733&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/RemoteHiveSparkClient.java Fri Apr 10 19:34:19 2015
@@ -22,6 +22,7 @@ import com.google.common.base.Strings;
 
 import java.io.IOException;
 import java.io.Serializable;
+import java.io.StringWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.util.ArrayList;
@@ -33,6 +34,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -231,6 +233,7 @@ public class RemoteHiveSparkClient imple
       Path localScratchDir = KryoSerializer.deserialize(scratchDirBytes, Path.class);
       SparkWork localSparkWork = KryoSerializer.deserialize(sparkWorkBytes, SparkWork.class);
+      logConfigurations(localJobConf);
 
       SparkCounters sparkCounters = new SparkCounters(jc.sc());
       Map<String, List<String>> prefixes = localSparkWork.getRequiredCounterPrefix();
@@ -255,6 +258,18 @@ public class RemoteHiveSparkClient imple
 
       jc.monitor(future, sparkCounters, plan.getCachedRDDIds());
       return null;
     }
-  }
+    private void logConfigurations(JobConf localJobConf) {
+      if (LOG.isInfoEnabled()) {
+        LOG.info("Logging job configuration: ");
+        StringWriter outWriter = new StringWriter();
+        try {
+          Configuration.dumpConfiguration(localJobConf, outWriter);
+        } catch (IOException e) {
+          LOG.warn("Error logging job configuration", e);
+        }
+        LOG.info(outWriter.toString());
+      }
+    }
+  }
 }
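
For context, Configuration.dumpConfiguration(Configuration, Writer) is a static
Hadoop utility that serializes every resolved property of a Configuration (and
therefore of a JobConf, which extends it) as a single JSON document written to
the supplied Writer. The standalone sketch below is not part of the patch; it
shows the same pattern against a plain Configuration, and the class name and
sample property are illustrative assumptions only.

import java.io.IOException;
import java.io.StringWriter;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;

// Hypothetical example class, not part of Hive; only
// Configuration.dumpConfiguration is the API exercised by the patch.
public class ConfigurationDumpExample {
  private static final Log LOG = LogFactory.getLog(ConfigurationDumpExample.class);

  public static void main(String[] args) {
    // A plain Configuration stands in for the JobConf the patched method receives.
    Configuration conf = new Configuration();
    conf.set("hive.execution.engine", "spark"); // sample property for the dump

    if (LOG.isInfoEnabled()) {
      StringWriter outWriter = new StringWriter();
      try {
        // Writes all resolved key/value pairs as JSON into the StringWriter.
        Configuration.dumpConfiguration(conf, outWriter);
      } catch (IOException e) {
        LOG.warn("Error dumping configuration", e);
      }
      LOG.info(outWriter.toString());
    }
  }
}

Guarding the dump with LOG.isInfoEnabled(), as the patch does, avoids building
the potentially large JSON string at all when INFO logging is disabled.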