From: sershe@apache.org
To: commits@hive.apache.org
Date: Mon, 17 Aug 2015 22:01:04 -0000
Subject: [34/50] [abbrv] hive git commit: HIVE-11304: Migrate to Log4j2 from Log4j 1.x (Prasanth Jayachandran reviewed by Thejas Nair, Sergey Shelukhin)

HIVE-11304: Migrate to Log4j2 from Log4j 1.x (Prasanth Jayachandran reviewed by Thejas Nair, Sergey Shelukhin)

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c93d6c77
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c93d6c77
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c93d6c77

Branch: refs/heads/hbase-metastore
Commit: c93d6c77e31e2eb9b40f5167ab3491d44eae351a
Parents: a4849cb
Author: Prasanth Jayachandran
Authored: Fri Aug 14 10:17:20 2015 -0700
Committer: Prasanth Jayachandran
Committed: Fri Aug 14 10:17:20 2015 -0700

----------------------------------------------------------------------
 accumulo-handler/pom.xml                        | 4 -
 .../src/main/resources/beeline-log4j.properties | 24 --
 beeline/src/main/resources/beeline-log4j2.xml   | 40 ++++
 bin/ext/beeline.sh                              | 2 +-
 bin/hive                                        | 3 +
 common/pom.xml                                  | 22 +-
 .../org/apache/hadoop/hive/common/LogUtils.java | 18 +-
 .../org/apache/hadoop/hive/conf/HiveConf.java   | 8 +-
 common/src/main/resources/hive-log4j.properties | 88 --------
 common/src/main/resources/hive-log4j2.xml       | 111 +++++++++
 .../hadoop/hive/conf/TestHiveLogging.java       | 8 +-
 .../resources/hive-exec-log4j-test.properties   | 59 -----
 .../test/resources/hive-exec-log4j2-test.xml    | 86 +++++++
 .../test/resources/hive-log4j-test.properties   | 71 ------
 common/src/test/resources/hive-log4j2-test.xml  | 95 ++++++++
 data/conf/hive-log4j-old.properties             | 82 -------
 data/conf/hive-log4j.properties                 | 97 --------
 data/conf/hive-log4j2.xml                       | 148 ++++++++++++
 data/conf/spark/log4j.properties                | 24 --
 data/conf/spark/log4j2.xml                      | 74 ++++++
 docs/xdocs/language_manual/cli.xml              | 2 +-
 hcatalog/bin/hcat_server.sh                     | 2 +-
 hcatalog/bin/templeton.cmd                      | 4 +-
 hcatalog/scripts/hcat_server_start.sh           | 2 +-
 .../content/xdocs/configuration.xml             | 2 +-
 .../src/documentation/content/xdocs/install.xml | 2 +-
 .../deployers/config/hive/hive-log4j.properties | 88 --------
 .../deployers/config/hive/hive-log4j2.xml       | 111 +++++++++
.../templeton/deployers/start_hive_services.sh | 2 +- .../webhcat/svr/src/main/bin/webhcat_server.sh | 4 +- .../src/main/config/webhcat-log4j.properties | 45 ---- .../svr/src/main/config/webhcat-log4j2.xml | 75 +++++++ .../main/java/org/apache/hive/hplsql/Exec.java | 2 + .../operation/TestOperationLoggingLayout.java | 136 +++++++++++ itests/pom.xml | 2 +- .../hadoop/hive/metastore/HiveMetaStore.java | 2 +- .../metastore/txn/TestCompactionTxnHandler.java | 40 +++- .../hive/metastore/txn/TestTxnHandler.java | 66 ++++-- packaging/src/main/assembly/bin.xml | 14 +- pom.xml | 37 ++- ql/pom.xml | 17 +- .../hadoop/hive/ql/exec/mr/ExecDriver.java | 29 ++- .../hive/ql/exec/mr/HadoopJobExecHelper.java | 20 +- .../ql/io/rcfile/stats/PartialScanTask.java | 20 +- .../hadoop/hive/ql/log/HiveEventCounter.java | 135 +++++++++++ .../apache/hadoop/hive/ql/log/NullAppender.java | 63 ++++++ .../ql/log/PidDailyRollingFileAppender.java | 33 --- .../hive/ql/log/PidFilePatternConverter.java | 62 ++++++ .../main/resources/hive-exec-log4j.properties | 77 ------- ql/src/main/resources/hive-exec-log4j2.xml | 110 +++++++++ .../hadoop/hive/ql/log/TestLog4j2Appenders.java | 95 ++++++++ .../hadoop/hive/ql/metadata/StringAppender.java | 128 +++++++++++ .../hadoop/hive/ql/metadata/TestHive.java | 50 +++-- .../hive/service/cli/CLIServiceUtils.java | 7 - .../cli/operation/LogDivertAppender.java | 223 +++++++++++-------- .../service/cli/operation/OperationManager.java | 17 +- shims/common/pom.xml | 17 +- .../hadoop/hive/shims/HiveEventCounter.java | 102 --------- .../src/test/resources/log4j.properties | 23 -- spark-client/src/test/resources/log4j2.xml | 39 ++++ storage-api/pom.xml | 7 - testutils/ptest2/pom.xml | 20 ++ .../ptest2/src/main/resources/log4j.properties | 37 --- testutils/ptest2/src/main/resources/log4j2.xml | 79 +++++++ 64 files changed, 1989 insertions(+), 1123 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/accumulo-handler/pom.xml ---------------------------------------------------------------------- diff --git a/accumulo-handler/pom.xml b/accumulo-handler/pom.xml index ee40f72..4e3a087 100644 --- a/accumulo-handler/pom.xml +++ b/accumulo-handler/pom.xml @@ -91,10 +91,6 @@ slf4j-api - org.slf4j - slf4j-log4j12 - - junit junit test http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/beeline/src/main/resources/beeline-log4j.properties ---------------------------------------------------------------------- diff --git a/beeline/src/main/resources/beeline-log4j.properties b/beeline/src/main/resources/beeline-log4j.properties deleted file mode 100644 index fe47d94..0000000 --- a/beeline/src/main/resources/beeline-log4j.properties +++ /dev/null @@ -1,24 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=WARN, console - -######## console appender ######## -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/beeline/src/main/resources/beeline-log4j2.xml ---------------------------------------------------------------------- diff --git a/beeline/src/main/resources/beeline-log4j2.xml b/beeline/src/main/resources/beeline-log4j2.xml new file mode 100644 index 0000000..5f09741 --- /dev/null +++ b/beeline/src/main/resources/beeline-log4j2.xml @@ -0,0 +1,40 @@ + + + + + + + ALL + WARN + console + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/bin/ext/beeline.sh ---------------------------------------------------------------------- diff --git a/bin/ext/beeline.sh b/bin/ext/beeline.sh index ab3dc1a..9de8f6c 100644 --- a/bin/ext/beeline.sh +++ b/bin/ext/beeline.sh @@ -31,7 +31,7 @@ beeline () { hadoopClasspath="${HADOOP_CLASSPATH}:" fi export HADOOP_CLASSPATH="${hadoopClasspath}${HIVE_CONF_DIR}:${beelineJarPath}:${superCsvJarPath}:${jlineJarPath}:${jdbcStandaloneJarPath}" - export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configuration=beeline-log4j.properties " + export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=beeline-log4j2.xml " exec $HADOOP jar ${beelineJarPath} $CLASS $HIVE_OPTS "$@" } http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/bin/hive ---------------------------------------------------------------------- diff --git a/bin/hive b/bin/hive index 5dc93fb..ad7139e 100755 --- a/bin/hive +++ b/bin/hive @@ -193,6 +193,9 @@ if [ "$HADOOP_HOME" == "" ]; then exit 4; fi +# to avoid errors from log4j2 automatic configuration loading +export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=hive-log4j2.xml " + HADOOP=$HADOOP_HOME/bin/hadoop if [ ! 
-f ${HADOOP} ]; then echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path"; http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/pom.xml ---------------------------------------------------------------------- diff --git a/common/pom.xml b/common/pom.xml index a7997e2..dba814d 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -66,14 +66,24 @@ ${joda.version} - log4j - log4j - ${log4j.version} + org.apache.logging.log4j + log4j-1.2-api + ${log4j2.version} - log4j - apache-log4j-extras - ${log4j-extras.version} + org.apache.logging.log4j + log4j-web + ${log4j2.version} + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j2.version} + + + org.apache.logging.log4j + log4j-jcl + ${log4j2.version} org.apache.commons http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/java/org/apache/hadoop/hive/common/LogUtils.java ---------------------------------------------------------------------- diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java index 9118675..3ca5c0f 100644 --- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java +++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java @@ -18,26 +18,22 @@ package org.apache.hadoop.hive.common; -import java.net.URL; import java.io.File; -import java.io.IOException; -import java.io.FileNotFoundException; +import java.net.URL; -import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.log4j.LogManager; -import org.apache.log4j.PropertyConfigurator; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.logging.log4j.core.config.Configurator; /** * Utilities common to logging operations. 
*/ public class LogUtils { - private static final String HIVE_L4J = "hive-log4j.properties"; - private static final String HIVE_EXEC_L4J = "hive-exec-log4j.properties"; + private static final String HIVE_L4J = "hive-log4j2.xml"; + private static final String HIVE_EXEC_L4J = "hive-exec-log4j2.xml"; private static final Log l4j = LogFactory.getLog(LogUtils.class); @SuppressWarnings("serial") @@ -95,8 +91,7 @@ public class LogUtils { } System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId); } - LogManager.resetConfiguration(); - PropertyConfigurator.configure(log4jFileName); + Configurator.initialize(null, log4jFileName); logConfigLocation(conf); return ("Logging initialized using configuration in " + log4jConfigFile); } @@ -123,8 +118,7 @@ public class LogUtils { break; } if (hive_l4j != null) { - LogManager.resetConfiguration(); - PropertyConfigurator.configure(hive_l4j); + Configurator.initialize(null, hive_l4j.toString()); logConfigLocation(conf); return (logMessage + "\n" + "Logging initialized using configuration in " + hive_l4j); } else { http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java ---------------------------------------------------------------------- diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 11b9f78..730f5be 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1696,13 +1696,13 @@ public class HiveConf extends Configuration { // logging configuration HIVE_LOG4J_FILE("hive.log4j.file", "", "Hive log4j configuration file.\n" + - "If the property is not set, then logging will be initialized using hive-log4j.properties found on the classpath.\n" + - "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" + + "If the property is not set, then logging will be initialized using hive-log4j2.xml found on the classpath.\n" + + "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.xml\"), \n" + "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."), HIVE_EXEC_LOG4J_FILE("hive.exec.log4j.file", "", "Hive log4j configuration file for execution mode(sub command).\n" + - "If the property is not set, then logging will be initialized using hive-exec-log4j.properties found on the classpath.\n" + - "If the property is set, the value must be a valid URI (java.net.URI, e.g. \"file:///tmp/my-logging.properties\"), \n" + + "If the property is not set, then logging will be initialized using hive-exec-log4j2.xml found on the classpath.\n" + + "If the property is set, the value must be a valid URI (java.net.URI, e.g. 
\"file:///tmp/my-logging.xml\"), \n" + "which you can then extract a URL from and pass to PropertyConfigurator.configure(URL)."), HIVE_LOG_EXPLAIN_OUTPUT("hive.log.explain.output", false, http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/main/resources/hive-log4j.properties ---------------------------------------------------------------------- diff --git a/common/src/main/resources/hive-log4j.properties b/common/src/main/resources/hive-log4j.properties deleted file mode 100644 index 14fa725..0000000 --- a/common/src/main/resources/hive-log4j.properties +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.log.threshold=ALL -hive.root.logger=INFO,DRFA -hive.log.dir=${java.io.tmpdir}/${user.name} -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshold=${hive.log.threshold} - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. -# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
-# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/main/resources/hive-log4j2.xml ---------------------------------------------------------------------- diff --git a/common/src/main/resources/hive-log4j2.xml b/common/src/main/resources/hive-log4j2.xml new file mode 100644 index 0000000..31b8fcc --- /dev/null +++ b/common/src/main/resources/hive-log4j2.xml @@ -0,0 +1,111 @@ + + + + + + + ALL + INFO + DRFA + ${sys:java.io.tmpdir}/${sys:user.name} + hive.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java ---------------------------------------------------------------------- diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java index d5cedb1..92269e7 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveLogging.java @@ -21,12 +21,12 @@ import java.io.BufferedReader; import java.io.File; import java.io.InputStreamReader; -import junit.framework.TestCase; - import org.apache.hadoop.hive.common.LogUtils; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.common.util.HiveTestUtils; +import junit.framework.TestCase; + /** * TestHiveLogging * @@ -104,9 +104,9 @@ public class TestHiveLogging extends TestCase { // customized log4j config log file to be: /${test.tmp.dir}/TestHiveLogging/hiveLog4jTest.log File customLogPath = new File(new File(System.getProperty("test.tmp.dir")), System.getProperty("user.name") + "-TestHiveLogging/"); - String customLogName = "hiveLog4jTest.log"; + String customLogName = "hiveLog4j2Test.log"; File customLogFile = new File(customLogPath, customLogName); RunTest(customLogFile, - "hive-log4j-test.properties", "hive-exec-log4j-test.properties"); + "hive-log4j2-test.xml", "hive-exec-log4j2-test.xml"); } } http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-exec-log4j-test.properties ---------------------------------------------------------------------- diff --git a/common/src/test/resources/hive-exec-log4j-test.properties b/common/src/test/resources/hive-exec-log4j-test.properties deleted file mode 100644 index 1e53f26..0000000 --- a/common/src/test/resources/hive-exec-log4j-test.properties +++ /dev/null @@ -1,59 +0,0 @@ -# Define some default values that can be overridden by system properties -hive.root.logger=INFO,FA -hive.log.dir=/${test.tmp.dir}/${user.name}-TestHiveLogging -hive.log.file=hiveExecLog4jTest.log - -# Define the root logger to the system property "hadoop.root.logger". 
-log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# File Appender -# - -log4j.appender.FA=org.apache.log4j.FileAppender -log4j.appender.FA.File=${hive.log.dir}/${hive.log.file} -log4j.appender.FA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.FA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter - - -log4j.category.DataNucleus=ERROR,FA -log4j.category.Datastore=ERROR,FA -log4j.category.Datastore.Schema=ERROR,FA -log4j.category.JPOX.Datastore=ERROR,FA -log4j.category.JPOX.Plugin=ERROR,FA -log4j.category.JPOX.MetaData=ERROR,FA -log4j.category.JPOX.Query=ERROR,FA -log4j.category.JPOX.General=ERROR,FA -log4j.category.JPOX.Enhancer=ERROR,FA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,FA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,FA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-exec-log4j2-test.xml ---------------------------------------------------------------------- diff --git a/common/src/test/resources/hive-exec-log4j2-test.xml b/common/src/test/resources/hive-exec-log4j2-test.xml new file mode 100644 index 0000000..b5f2cb4 --- /dev/null +++ b/common/src/test/resources/hive-exec-log4j2-test.xml @@ -0,0 +1,86 @@ + + + + + + + ALL + INFO + FA + ${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging + hiveExecLog4j2Test.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-log4j-test.properties ---------------------------------------------------------------------- diff --git a/common/src/test/resources/hive-log4j-test.properties b/common/src/test/resources/hive-log4j-test.properties deleted file mode 100644 index 0348325..0000000 --- a/common/src/test/resources/hive-log4j-test.properties +++ /dev/null @@ -1,71 +0,0 @@ -# Define some default values that can be overridden by system properties -hive.root.logger=WARN,DRFA -hive.log.dir=${test.tmp.dir}/${user.name}-TestHiveLogging -hive.log.file=hiveLog4jTest.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. 
-# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/common/src/test/resources/hive-log4j2-test.xml ---------------------------------------------------------------------- diff --git a/common/src/test/resources/hive-log4j2-test.xml b/common/src/test/resources/hive-log4j2-test.xml new file mode 100644 index 0000000..63b46c8 --- /dev/null +++ b/common/src/test/resources/hive-log4j2-test.xml @@ -0,0 +1,95 @@ + + + + + + + ALL + WARN + DRFA + ${sys:test.tmp.dir}/${sys:user.name}-TestHiveLogging + hiveLog4j2Test.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j-old.properties ---------------------------------------------------------------------- diff --git a/data/conf/hive-log4j-old.properties b/data/conf/hive-log4j-old.properties deleted file mode 100644 index f274b8c..0000000 --- a/data/conf/hive-log4j-old.properties +++ /dev/null @@ -1,82 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.root.logger=DEBUG,DRFA -hive.log.dir=${build.dir.hive}/ql/tmp/ -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. -# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA -log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j.properties ---------------------------------------------------------------------- diff --git a/data/conf/hive-log4j.properties b/data/conf/hive-log4j.properties deleted file mode 100644 index 023e3c2..0000000 --- a/data/conf/hive-log4j.properties +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Define some default values that can be overridden by system properties -hive.root.logger=DEBUG,DRFA -hive.log.dir=${test.tmp.dir}/log/ -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshhold=WARN - -# -# Daily Rolling File Appender -# - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - -# Warning: If you enable the following appender it will fill up your disk if you don't have a cleanup job! -# This uses the updated rolling file appender from log4j-extras that supports a reliable time-based rolling policy. -# See http://logging.apache.org/log4j/companions/extras/apidocs/org/apache/log4j/rolling/TimeBasedRollingPolicy.html -# Add "DAILY" to hive.root.logger above if you want to use this. -log4j.appender.DAILY=org.apache.log4j.rolling.RollingFileAppender -log4j.appender.DAILY.rollingPolicy=org.apache.log4j.rolling.TimeBasedRollingPolicy -log4j.appender.DAILY.rollingPolicy.ActiveFileName=${hive.log.dir}/${hive.log.file} -log4j.appender.DAILY.rollingPolicy.FileNamePattern=${hive.log.dir}/${hive.log.file}.%d{yyyy-MM-dd} -log4j.appender.DAILY.layout=org.apache.log4j.PatternLayout -log4j.appender.DAILY.layout.ConversionPattern=%d{dd MMM yyyy HH:mm:ss,SSS} %-5p [%t] (%C.%M:%L) %x - %m%n - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
-# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA -log4j.logger.org.apache.hadoop.conf.Configuration=ERROR,DRFA -log4j.logger.org.apache.zookeeper=INFO,DRFA -log4j.logger.org.apache.zookeeper.server.ServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocket=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA -log4j.logger.org.apache.hadoop.hive.ql.log.PerfLogger=${hive.ql.log.PerfLogger.level} -log4j.logger.org.apache.hadoop.hive.ql.exec.Operator=INFO,DRFA -log4j.logger.org.apache.hadoop.hive.serde2.lazy=INFO,DRFA -log4j.logger.org.apache.hadoop.hive.metastore.ObjectStore=INFO,DRFA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/hive-log4j2.xml ---------------------------------------------------------------------- diff --git a/data/conf/hive-log4j2.xml b/data/conf/hive-log4j2.xml new file mode 100644 index 0000000..c9adfa2 --- /dev/null +++ b/data/conf/hive-log4j2.xml @@ -0,0 +1,148 @@ + + + + + + + ALL + DEBUG + DRFA + ${sys:test.tmp.dir}/log + hive.log + INFO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/spark/log4j.properties ---------------------------------------------------------------------- diff --git a/data/conf/spark/log4j.properties b/data/conf/spark/log4j.properties deleted file mode 100644 index 8838c0e..0000000 --- a/data/conf/spark/log4j.properties +++ /dev/null @@ -1,24 +0,0 @@ -log4j.rootCategory=DEBUG, DRFA - -hive.spark.log.dir=target/tmp/log -# Settings to quiet third party logs that are too verbose -log4j.logger.org.eclipse.jetty=WARN -log4j.logger.org.eclipse.jetty.util.component.AbstractLifeCycle=ERROR -log4j.logger.org.apache.spark.repl.SparkIMain$exprTyper=INFO -log4j.logger.org.apache.spark.repl.SparkILoop$SparkILoopInterpreter=INFO - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.spark.log.dir}/spark.log - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/data/conf/spark/log4j2.xml ---------------------------------------------------------------------- diff --git a/data/conf/spark/log4j2.xml b/data/conf/spark/log4j2.xml new file mode 100644 index 0000000..395a2bf --- /dev/null +++ b/data/conf/spark/log4j2.xml @@ -0,0 +1,74 @@ + + + + + + + DEBUG + DRFA + target/tmp/log + spark.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
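The XML bodies of the new Log4j 2 configuration files added above (beeline-log4j2.xml, hive-log4j2.xml, the test configs, data/conf/hive-log4j2.xml and data/conf/spark/log4j2.xml) are not legible in this archived copy of the message; only their property values survived the markup stripping. For readers following the migration, the key companion change is in the LogUtils.java hunk earlier in this commit, which drops log4j 1.x's LogManager.resetConfiguration()/PropertyConfigurator.configure() pair in favor of Log4j 2's Configurator.initialize(). A minimal, self-contained sketch of that initialization style is shown below; the class name and the /tmp path are placeholders, not code taken from the commit.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.config.Configurator;

public class Log4j2InitSketch {
  public static void main(String[] args) {
    // Point Log4j 2 at an explicit configuration file, mirroring the
    // Configurator.initialize(null, log4jFileName) call that LogUtils.java
    // switches to in this commit. The path below is only an example.
    Configurator.initialize(null, "/tmp/hive-log4j2.xml");

    Logger log = LogManager.getLogger(Log4j2InitSketch.class);
    log.info("Logging initialized using configuration in /tmp/hive-log4j2.xml");
  }
}

Unlike PropertyConfigurator, Configurator.initialize returns a LoggerContext and accepts either a file location or a classpath resource name, which is why the various scripts in this commit only need to pass -Dlog4j.configurationFile=hive-log4j2.xml.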
http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/docs/xdocs/language_manual/cli.xml ---------------------------------------------------------------------- diff --git a/docs/xdocs/language_manual/cli.xml b/docs/xdocs/language_manual/cli.xml index a293382..eb91e44 100644 --- a/docs/xdocs/language_manual/cli.xml +++ b/docs/xdocs/language_manual/cli.xml @@ -163,7 +163,7 @@ Sample Usage:

-Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use hive-log4j.default in the conf/ directory of the hive installation which writes out logs to /tmp/$USER/hive.log and uses the WARN level. +Hive uses log4j for logging. These logs are not emitted to the standard output by default but are instead captured to a log file specified by Hive's log4j properties file. By default Hive will use hive-log4j2.xml in the conf/ directory of the hive installation which writes out logs to /tmp/$USER/hive.log and uses the WARN level.

It is often desirable to emit the logs to the standard output and/or change the logging level for debugging purposes. These can be done from the command line as follows:
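The command-line example that the cli.xml paragraph above refers to is not visible in this hunk. As a rough illustration of the same idea under Log4j 2 — changing log levels for debugging without editing the configuration file — a small hedged Java sketch follows; it uses Configurator.setRootLevel/setLevel from recent Log4j 2 releases, the class name is a placeholder, and the snippet is not part of this commit.

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.config.Configurator;

public class DebugLevelSketch {
  public static void main(String[] args) {
    Logger log = LogManager.getLogger(DebugLevelSketch.class);

    // Raise the root logger to DEBUG at runtime, roughly what a root-logger
    // override on the Hive command line accomplishes for a debugging session.
    Configurator.setRootLevel(Level.DEBUG);
    log.debug("root logger raised to DEBUG");

    // Quiet a chatty logger without touching the XML configuration.
    Configurator.setLevel("org.apache.zookeeper", Level.WARN);
  }
}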

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/bin/hcat_server.sh ---------------------------------------------------------------------- diff --git a/hcatalog/bin/hcat_server.sh b/hcatalog/bin/hcat_server.sh index 6b09d3e..d1aecb8 100644 --- a/hcatalog/bin/hcat_server.sh +++ b/hcatalog/bin/hcat_server.sh @@ -84,7 +84,7 @@ function start_hcat() { export AUX_CLASSPATH=${AUX_CLASSPATH} export HADOOP_HOME=$HADOOP_HOME - #export HADOOP_OPTS="-Dlog4j.configuration=file://${HCAT_PREFIX}/conf/log4j.properties" + #export HADOOP_OPTS="-Dlog4j.configurationFile=file://${HCAT_PREFIX}/conf/log4j2.xml" export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${HCAT_LOG_DIR}/hcat_err_pid%p.log -Xloggc:${HCAT_LOG_DIR}/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps" export HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-2048} # 8G is better if you have it export METASTORE_PORT=${METASTORE_PORT:-9083} http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/bin/templeton.cmd ---------------------------------------------------------------------- diff --git a/hcatalog/bin/templeton.cmd b/hcatalog/bin/templeton.cmd index e9a735d..759f654 100644 --- a/hcatalog/bin/templeton.cmd +++ b/hcatalog/bin/templeton.cmd @@ -59,9 +59,9 @@ setlocal enabledelayedexpansion if not defined TEMPLETON_LOG4J ( @rem must be prefixed with file: otherwise config is not picked up - set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j.properties + set TEMPLETON_LOG4J=file:%WEBHCAT_CONF_DIR%\webhcat-log4j2.xml ) - set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configuration=%TEMPLETON_LOG4J% %HADOOP_OPTS% + set TEMPLETON_OPTS=-Dtempleton.log.dir=%TEMPLETON_LOG_DIR% -Dlog4j.configurationFile=%TEMPLETON_LOG4J% %HADOOP_OPTS% set arguments=%JAVA_HEAP_MAX% %TEMPLETON_OPTS% -classpath %CLASSPATH% org.apache.hive.hcatalog.templeton.Main if defined service_entry ( http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/scripts/hcat_server_start.sh ---------------------------------------------------------------------- diff --git a/hcatalog/scripts/hcat_server_start.sh b/hcatalog/scripts/hcat_server_start.sh index 1670b70..872d1b5 100755 --- a/hcatalog/scripts/hcat_server_start.sh +++ b/hcatalog/scripts/hcat_server_start.sh @@ -70,7 +70,7 @@ export AUX_CLASSPATH=${AUX_CLASSPATH} export HADOOP_HOME=$HADOOP_HOME -#export HADOOP_OPTS="-Dlog4j.configuration=file://${ROOT}/conf/log4j.properties" +#export HADOOP_OPTS="-Dlog4j.configurationFile=file://${ROOT}/conf/log4j2.xml" export HADOOP_OPTS="${HADOOP_OPTS} -server -XX:+UseConcMarkSweepGC -XX:ErrorFile=${ROOT}/var/log/hcat_err_pid%p.log -Xloggc:${ROOT}/var/log/hcat_gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps" export HADOOP_HEAPSIZE=2048 # 8G is better if you have it http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml ---------------------------------------------------------------------- diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml index 9757b9c..6385e40 100644 --- a/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml +++ b/hcatalog/src/docs/src/documentation/content/xdocs/configuration.xml @@ -66,7 +66,7 @@ ${env.PIG_HOME}/bin/pig uncertainty.

Note: The location of the log files created by Templeton and some other properties - of the logging system are set in the webhcat-log4j.properties file.

+ of the logging system are set in the webhcat-log4j2.xml file.

Variables http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/docs/src/documentation/content/xdocs/install.xml ---------------------------------------------------------------------- diff --git a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml index 16da248..e2953a9 100644 --- a/hcatalog/src/docs/src/documentation/content/xdocs/install.xml +++ b/hcatalog/src/docs/src/documentation/content/xdocs/install.xml @@ -241,7 +241,7 @@

Server activity logs are located in root/var/log/hcat_server. Logging configuration is located at - root/conf/log4j.properties. Server logging uses + root/conf/log4j2.xml. Server logging uses DailyRollingFileAppender by default. It will generate a new file per day and does not expire old log files automatically.
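Log4j 2 has no DailyRollingFileAppender; daily rotation is expressed as a RollingFile appender with a time-based triggering policy, which is presumably what the new log4j2.xml files (not legible here) encode. Purely as a hedged illustration — built with the ConfigurationBuilder API from later Log4j 2 releases, with placeholder paths and a hypothetical class name — the equivalent programmatic setup might look like this:

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.core.config.builder.api.AppenderComponentBuilder;
import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilder;
import org.apache.logging.log4j.core.config.builder.api.ConfigurationBuilderFactory;
import org.apache.logging.log4j.core.config.builder.impl.BuiltConfiguration;

public class DailyRollingSketch {
  public static void main(String[] args) {
    ConfigurationBuilder<BuiltConfiguration> builder =
        ConfigurationBuilderFactory.newConfigurationBuilder();

    // A RollingFile appender that rolls once per day: the Log4j 2 analogue of
    // log4j 1.x's DailyRollingFileAppender mentioned in the install docs above.
    AppenderComponentBuilder drfa = builder.newAppender("DRFA", "RollingFile")
        .addAttribute("fileName", "/tmp/hcat/hcat.log")                    // placeholder path
        .addAttribute("filePattern", "/tmp/hcat/hcat.log.%d{yyyy-MM-dd}")  // placeholder path
        .add(builder.newLayout("PatternLayout")
            .addAttribute("pattern", "%d{ISO8601} %-5p [%t]: %c{2} - %m%n"))
        .addComponent(builder.newComponent("Policies")
            .addComponent(builder.newComponent("TimeBasedTriggeringPolicy")));
    builder.add(drfa);
    builder.add(builder.newRootLogger(Level.INFO).add(builder.newAppenderRef("DRFA")));

    // Rolled files are kept indefinitely unless a retention/delete policy is added,
    // matching the "does not expire old log files automatically" note above.
    Configurator.initialize(builder.build());
    LogManager.getLogger(DailyRollingSketch.class).info("daily rolling configured");
  }
}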

http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties ---------------------------------------------------------------------- diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties deleted file mode 100644 index 82684b3..0000000 --- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties +++ /dev/null @@ -1,88 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define some default values that can be overridden by system properties -hive.log.threshold=ALL -hive.root.logger=DEBUG,DRFA -hive.log.dir=/tmp/ekoifman -hive.log.file=hive.log - -# Define the root logger to the system property "hadoop.root.logger". -log4j.rootLogger=${hive.root.logger}, EventCounter - -# Logging Threshold -log4j.threshold=${hive.log.threshold} - -# -# Daily Rolling File Appender -# -# Use the PidDailyerRollingFileAppend class instead if you want to use separate log files -# for different CLI session. -# -# log4j.appender.DRFA=org.apache.hadoop.hive.ql.log.PidDailyRollingFileAppender - -log4j.appender.DRFA=org.apache.log4j.DailyRollingFileAppender - -log4j.appender.DRFA.File=${hive.log.dir}/${hive.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern=.yyyy-MM-dd - -# 30-day backup -#log4j.appender.DRFA.MaxBackupIndex=30 -log4j.appender.DRFA.layout=org.apache.log4j.PatternLayout - -# Pattern format: Date LogLevel LoggerName LogMessage -#log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n -# Debugging Pattern format -log4j.appender.DRFA.layout.ConversionPattern=%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n - - -# -# console -# Add "console" to rootlogger above if you want to use this -# - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.target=System.err -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} [%t]: %p %c{2}: %m%n -log4j.appender.console.encoding=UTF-8 - -#custom logging levels -#log4j.logger.xxx=DEBUG - -# -# Event Counter Appender -# Sends counts of logging messages at different severity levels to Hadoop Metrics. 
-# -log4j.appender.EventCounter=org.apache.hadoop.hive.shims.HiveEventCounter - - -log4j.category.DataNucleus=ERROR,DRFA -log4j.category.Datastore=ERROR,DRFA -log4j.category.Datastore.Schema=ERROR,DRFA -log4j.category.JPOX.Datastore=ERROR,DRFA -log4j.category.JPOX.Plugin=ERROR,DRFA -log4j.category.JPOX.MetaData=ERROR,DRFA -log4j.category.JPOX.Query=ERROR,DRFA -log4j.category.JPOX.General=ERROR,DRFA -log4j.category.JPOX.Enhancer=ERROR,DRFA - - -# Silence useless ZK logs -log4j.logger.org.apache.zookeeper.server.NIOServerCnxn=WARN,DRFA -log4j.logger.org.apache.zookeeper.ClientCnxnSocketNIO=WARN,DRFA http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml ---------------------------------------------------------------------- diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml new file mode 100644 index 0000000..30f7603 --- /dev/null +++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml @@ -0,0 +1,111 @@ + + + + + + + ALL + DEBUG + DRFA + ${sys:java.io.tmpdir}/${sys:user.name} + hive.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh ---------------------------------------------------------------------- diff --git a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh index 8cc9353..e59177c 100755 --- a/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh +++ b/hcatalog/src/test/e2e/templeton/deployers/start_hive_services.sh @@ -31,7 +31,7 @@ cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site. 
#cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.mssql.xml ${HIVE_HOME}/conf/hive-site.xml cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/webhcat/webhcat-site.xml ${HIVE_HOME}/hcatalog/etc/webhcat/webhcat-site.xml -cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j.properties ${HIVE_HOME}/conf/hive-log4j.properties +cp ${PROJ_HOME}/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-log4j2.xml ${HIVE_HOME}/conf/hive-log4j2.xml if [ -f ${MYSQL_CLIENT_JAR} ]; then cp ${MYSQL_CLIENT_JAR} ${HIVE_HOME}/lib http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh ---------------------------------------------------------------------- diff --git a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh index 0be8dde..c80fdd5 100644 --- a/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh +++ b/hcatalog/webhcat/svr/src/main/bin/webhcat_server.sh @@ -215,11 +215,11 @@ else fi if [[ -z "$WEBHCAT_LOG4J" ]]; then - WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j.properties"; + WEBHCAT_LOG4J="file://$base_dir/etc/webhcat/webhcat-log4j2.xml"; fi export HADOOP_USER_CLASSPATH_FIRST=true -export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configuration=$WEBHCAT_LOG4J" +export HADOOP_OPTS="${HADOOP_OPTS} -Dwebhcat.log.dir=$WEBHCAT_LOG_DIR -Dlog4j.configurationFile=$WEBHCAT_LOG4J" start_cmd="$HADOOP_PREFIX/bin/hadoop jar $JAR org.apache.hive.hcatalog.templeton.Main " http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties ---------------------------------------------------------------------- diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties deleted file mode 100644 index 866052c..0000000 --- a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j.properties +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Define some default values that can be overridden by system properties -webhcat.root.logger = INFO, standard -webhcat.log.dir = . 
-webhcat.log.file = webhcat.log - -log4j.rootLogger = ${webhcat.root.logger} - -# Logging Threshold -log4j.threshhold = DEBUG - -log4j.appender.standard = org.apache.log4j.DailyRollingFileAppender -log4j.appender.standard.File = ${webhcat.log.dir}/${webhcat.log.file} - -# Rollver at midnight -log4j.appender.DRFA.DatePattern = .yyyy-MM-dd - -log4j.appender.DRFA.layout = org.apache.log4j.PatternLayout - -log4j.appender.standard.layout = org.apache.log4j.PatternLayout -log4j.appender.standard.layout.conversionPattern = %-5p | %d{DATE} | %c | %m%n - -# Class logging settings -log4j.logger.com.sun.jersey = DEBUG -log4j.logger.com.sun.jersey.spi.container.servlet.WebComponent = ERROR -log4j.logger.org.apache.hadoop = INFO -log4j.logger.org.apache.hadoop.conf = WARN -log4j.logger.org.apache.zookeeper = WARN -log4j.logger.org.eclipse.jetty = INFO http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml ---------------------------------------------------------------------- diff --git a/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml new file mode 100644 index 0000000..40da974 --- /dev/null +++ b/hcatalog/webhcat/svr/src/main/config/webhcat-log4j2.xml @@ -0,0 +1,75 @@ + + + + + + + ALL + INFO + standard + . + webhcat.log + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java ---------------------------------------------------------------------- diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java index 73f470c..268c218 100644 --- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java +++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java @@ -670,6 +670,8 @@ public class Exec extends HplsqlBaseVisitor { if (!parseArguments(args)) { return 1; } + // specify the default log4j2 properties file. 
+ System.setProperty("log4j.configurationFile", "hive-log4j2.xml"); conf = new Conf(); conf.init(); conn = new Conn(this); http://git-wip-us.apache.org/repos/asf/hive/blob/c93d6c77/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java ---------------------------------------------------------------------- diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java new file mode 100644 index 0000000..93c16de --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/operation/TestOperationLoggingLayout.java @@ -0,0 +1,136 @@ +package org.apache.hive.service.cli.operation; + +import java.io.File; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hive.jdbc.miniHS2.MiniHS2; +import org.apache.hive.service.cli.CLIServiceClient; +import org.apache.hive.service.cli.FetchOrientation; +import org.apache.hive.service.cli.FetchType; +import org.apache.hive.service.cli.OperationHandle; +import org.apache.hive.service.cli.RowSet; +import org.apache.hive.service.cli.SessionHandle; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +/** + * Tests to verify operation logging layout for different modes. + */ +public class TestOperationLoggingLayout { + protected static HiveConf hiveConf; + protected static String tableName; + private File dataFile; + protected CLIServiceClient client; + protected static MiniHS2 miniHS2 = null; + protected static Map confOverlay; + protected SessionHandle sessionHandle; + protected final String sql = "select * from " + tableName; + private final String sqlCntStar = "select count(*) from " + tableName; + + @BeforeClass + public static void setUpBeforeClass() throws Exception { + tableName = "TestOperationLoggingLayout_table"; + hiveConf = new HiveConf(); + hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LEVEL.varname, "execution"); + // We need to set the below parameter to test performance level logging + hiveConf.set("hive.ql.log.PerfLogger.level", "INFO,DRFA"); + miniHS2 = new MiniHS2(hiveConf); + confOverlay = new HashMap(); + confOverlay.put(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false"); + miniHS2.start(confOverlay); + } + + /** + * Open a session, and create a table for cases usage + * + * @throws Exception + */ + @Before + public void setUp() throws Exception { + dataFile = new File(hiveConf.get("test.data.files"), "kv1.txt"); + client = miniHS2.getServiceClient(); + sessionHandle = setupSession(); + } + + @After + public void tearDown() throws Exception { + // Cleanup + String queryString = "DROP TABLE " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + client.closeSession(sessionHandle); + } + + @AfterClass + public static void tearDownAfterClass() throws Exception { + miniHS2.stop(); + } + + @Test + public void testSwitchLogLayout() throws Exception { + // verify whether the sql operation log is generated and fetch correctly. 
+ OperationHandle operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null); + RowSet rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 1000, + FetchType.LOG); + Iterator iter = rowSetLog.iterator(); + // non-verbose pattern is %-5p : %m%n. Look for " : " + while (iter.hasNext()) { + String row = iter.next()[0].toString(); + Assert.assertEquals(true, row.matches("^(FATAL|ERROR|WARN|INFO|DEBUG|TRACE).*$")); + } + + String queryString = "set hive.server2.logging.operation.level=verbose"; + client.executeStatement(sessionHandle, queryString, null); + operationHandle = client.executeStatement(sessionHandle, sqlCntStar, null); + // just check for first few lines, some log lines are multi-line strings which can break format + // checks below + rowSetLog = client.fetchResults(operationHandle, FetchOrientation.FETCH_FIRST, 10, + FetchType.LOG); + iter = rowSetLog.iterator(); + // verbose pattern is "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n" + while (iter.hasNext()) { + String row = iter.next()[0].toString(); + // just check if the log line starts with date + Assert.assertEquals(true, + row.matches("^\\d{2}[/](0[1-9]|1[012])[/](0[1-9]|[12][0-9]|3[01]).*$")); + } + } + + private SessionHandle setupSession() throws Exception { + // Open a session + SessionHandle sessionHandle = client.openSession(null, null, null); + + // Change lock manager to embedded mode + String queryString = "SET hive.lock.manager=" + + "org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager"; + client.executeStatement(sessionHandle, queryString, null); + + // Drop the table if it exists + queryString = "DROP TABLE IF EXISTS " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + // Create a test table + queryString = "create table " + tableName + " (key int, value string)"; + client.executeStatement(sessionHandle, queryString, null); + + // Load data + queryString = "load data local inpath '" + dataFile + "' into table " + tableName; + client.executeStatement(sessionHandle, queryString, null); + + // Precondition check: verify whether the table is created and data is fetched correctly. + OperationHandle operationHandle = client.executeStatement(sessionHandle, sql, null); + RowSet rowSetResult = client.fetchResults(operationHandle); + Assert.assertEquals(500, rowSetResult.numRows()); + Assert.assertEquals(238, rowSetResult.iterator().next()[0]); + Assert.assertEquals("val_238", rowSetResult.iterator().next()[1]); + + return sessionHandle; + } +}
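Both bin/hive and the hplsql Exec.java hunk earlier in this commit take the same approach to avoiding Log4j 2's automatic-configuration fallback: name the configuration file before the first logger is created. A minimal sketch of that bootstrap order is below; the class name is a placeholder and the snippet is illustrative rather than code from the commit.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class LoggingBootstrapSketch {
  public static void main(String[] args) {
    // Must run before LogManager.getLogger() is first called anywhere in the
    // process; otherwise Log4j 2 falls back to its default configuration
    // (errors-only to the console) and prints a warning about the missing file.
    System.setProperty("log4j.configurationFile", "hive-log4j2.xml");

    Logger log = LogManager.getLogger(LoggingBootstrapSketch.class);
    log.info("logging configured from hive-log4j2.xml");
  }
}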