hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gunt...@apache.org
Subject [1/2] hive git commit: HIVE-1555: JDBC Storage Handler (Gunther Hagleitner, reviewed by Jason Dere)
Date Wed, 01 Mar 2017 07:57:01 GMT
Repository: hive
Updated Branches:
  refs/heads/master a9de1cdbb -> 12b27a355


http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java
b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java
new file mode 100644
index 0000000..5fd600b
--- /dev/null
+++ b/jdbc-handler/src/test/java/org/apache/hive/storage/jdbc/dao/GenericJdbcDatabaseAccessorTest.java
@@ -0,0 +1,206 @@
+/*
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive.storage.jdbc.dao;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.equalToIgnoringCase;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.junit.Assert.assertThat;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
+import org.apache.hive.storage.jdbc.conf.JdbcStorageConfig;
+import org.apache.hive.storage.jdbc.exception.HiveJdbcDatabaseAccessException;
+
+import java.util.List;
+import java.util.Map;
+
+public class GenericJdbcDatabaseAccessorTest {
+
+  @Test
+  public void testGetColumnNames_starQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    List<String> columnNames = accessor.getColumnNames(conf);
+
+    assertThat(columnNames, is(notNullValue()));
+    assertThat(columnNames.size(), is(equalTo(7)));
+    assertThat(columnNames.get(0), is(equalToIgnoringCase("strategy_id")));
+  }
+
+
+  @Test
+  public void testGetColumnNames_fieldListQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select name,referrer from test_strategy");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    List<String> columnNames = accessor.getColumnNames(conf);
+
+    assertThat(columnNames, is(notNullValue()));
+    assertThat(columnNames.size(), is(equalTo(2)));
+    assertThat(columnNames.get(0), is(equalToIgnoringCase("name")));
+  }
+
+
+  @Test(expected = HiveJdbcDatabaseAccessException.class)
+  public void testGetColumnNames_invalidQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from invalid_strategy");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    @SuppressWarnings("unused")
+      List<String> columnNames = accessor.getColumnNames(conf);
+  }
+
+
+  @Test
+  public void testGetTotalNumberOfRecords() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    int numRecords = accessor.getTotalNumberOfRecords(conf);
+
+    assertThat(numRecords, is(equalTo(5)));
+  }
+
+
+  @Test
+  public void testGetTotalNumberOfRecords_whereClause() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '5'");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    int numRecords = accessor.getTotalNumberOfRecords(conf);
+
+    assertThat(numRecords, is(equalTo(1)));
+  }
+
+
+  @Test
+  public void testGetTotalNumberOfRecords_noRecords() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    int numRecords = accessor.getTotalNumberOfRecords(conf);
+
+    assertThat(numRecords, is(equalTo(0)));
+  }
+
+
+  @Test(expected = HiveJdbcDatabaseAccessException.class)
+  public void testGetTotalNumberOfRecords_invalidQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx where strategy_id = '5'");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    @SuppressWarnings("unused")
+      int numRecords = accessor.getTotalNumberOfRecords(conf);
+  }
+
+
+  @Test
+  public void testGetRecordIterator() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 0);
+
+    assertThat(iterator, is(notNullValue()));
+
+    int count = 0;
+    while (iterator.hasNext()) {
+      Map<String, String> record = iterator.next();
+      count++;
+
+      assertThat(record, is(notNullValue()));
+      assertThat(record.size(), is(equalTo(7)));
+      assertThat(record.get("STRATEGY_ID"), is(equalTo(String.valueOf(count))));
+    }
+
+    assertThat(count, is(equalTo(2)));
+    iterator.close();
+  }
+
+
+  @Test
+  public void testGetRecordIterator_offsets() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 2, 2);
+
+    assertThat(iterator, is(notNullValue()));
+
+    int count = 0;
+    while (iterator.hasNext()) {
+      Map<String, String> record = iterator.next();
+      count++;
+
+      assertThat(record, is(notNullValue()));
+      assertThat(record.size(), is(equalTo(7)));
+      assertThat(record.get("STRATEGY_ID"), is(equalTo(String.valueOf(count + 2))));
+    }
+
+    assertThat(count, is(equalTo(2)));
+    iterator.close();
+  }
+
+
+  @Test
+  public void testGetRecordIterator_emptyResultSet() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy where strategy_id = '25'");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2);
+
+    assertThat(iterator, is(notNullValue()));
+    assertThat(iterator.hasNext(), is(false));
+    iterator.close();
+  }
+
+
+  @Test
+  public void testGetRecordIterator_largeOffset() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 10, 25);
+
+    assertThat(iterator, is(notNullValue()));
+    assertThat(iterator.hasNext(), is(false));
+    iterator.close();
+  }
+
+
+  @Test(expected = HiveJdbcDatabaseAccessException.class)
+  public void testGetRecordIterator_invalidQuery() throws HiveJdbcDatabaseAccessException {
+    Configuration conf = buildConfiguration();
+    conf.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from strategyx");
+    DatabaseAccessor accessor = DatabaseAccessorFactory.getAccessor(conf);
+    @SuppressWarnings("unused")
+      JdbcRecordIterator iterator = accessor.getRecordIterator(conf, 0, 2);
+  }
+
+
+  private Configuration buildConfiguration() {
+    String scriptPath =
+      GenericJdbcDatabaseAccessorTest.class.getClassLoader().getResource("test_script.sql")
+      .getPath();
+    Configuration config = new Configuration();
+    config.set(JdbcStorageConfig.DATABASE_TYPE.getPropertyName(), "H2");
+    config.set(JdbcStorageConfig.JDBC_DRIVER_CLASS.getPropertyName(), "org.h2.Driver");
+    config.set(JdbcStorageConfig.JDBC_URL.getPropertyName(), "jdbc:h2:mem:test;MODE=MySQL;INIT=runscript from '"
+        + scriptPath + "'");
+    config.set(JdbcStorageConfig.QUERY.getPropertyName(), "select * from test_strategy");
+
+    return config;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/jdbc-handler/src/test/resources/condition1.xml
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/resources/condition1.xml b/jdbc-handler/src/test/resources/condition1.xml
new file mode 100644
index 0000000..005fc25
--- /dev/null
+++ b/jdbc-handler/src/test/resources/condition1.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<java version="1.7.0_05" class="java.beans.XMLDecoder">
+	<object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+		<void property="children">
+			<object class="java.util.ArrayList">
+				<void method="add">
+					<object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
+						<void property="column">
+							<string>visitor_id</string>
+						</void>
+						<void property="tabAlias">
+							<string>mysql_test</string>
+						</void>
+						<void property="typeInfo">
+							<object
+								class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"
+								id="PrimitiveTypeInfo0">
+								<void property="typeName">
+									<string>string</string>
+								</void>
+							</object>
+						</void>
+					</object>
+				</void>
+				<void method="add">
+					<object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
+						<void property="typeInfo">
+							<object idref="PrimitiveTypeInfo0" />
+						</void>
+						<void property="value">
+							<string>x</string>
+						</void>
+					</object>
+				</void>
+			</object>
+		</void>
+		<void property="genericUDF">
+			<object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual" />
+		</void>
+		<void property="typeInfo">
+			<object class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo">
+				<void property="typeName">
+					<string>boolean</string>
+				</void>
+			</object>
+		</void>
+	</object>
+</java>

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/jdbc-handler/src/test/resources/condition2.xml
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/resources/condition2.xml b/jdbc-handler/src/test/resources/condition2.xml
new file mode 100644
index 0000000..f879297
--- /dev/null
+++ b/jdbc-handler/src/test/resources/condition2.xml
@@ -0,0 +1,101 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<java version="1.7.0_05" class="java.beans.XMLDecoder">
+	<object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+		<void property="children">
+			<object class="java.util.ArrayList">
+				<void method="add">
+					<object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+						<void property="children">
+							<object class="java.util.ArrayList">
+								<void method="add">
+									<object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
+										<void property="column">
+											<string>visitor_id</string>
+										</void>
+										<void property="tabAlias">
+											<string>mysql_test</string>
+										</void>
+										<void property="typeInfo">
+											<object
+												class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"
+												id="PrimitiveTypeInfo0">
+												<void property="typeName">
+													<string>string</string>
+												</void>
+											</object>
+										</void>
+									</object>
+								</void>
+								<void method="add">
+									<object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
+										<void property="typeInfo">
+											<object idref="PrimitiveTypeInfo0" />
+										</void>
+										<void property="value">
+											<string>x</string>
+										</void>
+									</object>
+								</void>
+							</object>
+						</void>
+						<void property="genericUDF">
+							<object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual" />
+						</void>
+						<void property="typeInfo">
+							<object
+								class="org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo"
+								id="PrimitiveTypeInfo1">
+								<void property="typeName">
+									<string>boolean</string>
+								</void>
+							</object>
+						</void>
+					</object>
+				</void>
+				<void method="add">
+					<object class="org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc">
+						<void property="children">
+							<object class="java.util.ArrayList">
+								<void method="add">
+									<object class="org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc">
+										<void property="column">
+											<string>sentiment</string>
+										</void>
+										<void property="tabAlias">
+											<string>mysql_test</string>
+										</void>
+										<void property="typeInfo">
+											<object idref="PrimitiveTypeInfo0" />
+										</void>
+									</object>
+								</void>
+								<void method="add">
+									<object class="org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc">
+										<void property="typeInfo">
+											<object idref="PrimitiveTypeInfo0" />
+										</void>
+										<void property="value">
+											<string>y</string>
+										</void>
+									</object>
+								</void>
+							</object>
+						</void>
+						<void property="genericUDF">
+							<object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual" />
+						</void>
+						<void property="typeInfo">
+							<object idref="PrimitiveTypeInfo1" />
+						</void>
+					</object>
+				</void>
+			</object>
+		</void>
+		<void property="genericUDF">
+			<object class="org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd" />
+		</void>
+		<void property="typeInfo">
+			<object idref="PrimitiveTypeInfo1" />
+		</void>
+	</object>
+</java>

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/jdbc-handler/src/test/resources/test_script.sql
----------------------------------------------------------------------
diff --git a/jdbc-handler/src/test/resources/test_script.sql b/jdbc-handler/src/test/resources/test_script.sql
new file mode 100644
index 0000000..5d7f08a
--- /dev/null
+++ b/jdbc-handler/src/test/resources/test_script.sql
@@ -0,0 +1,21 @@
+DROP TABLE IF EXISTS test_strategy;
+
+CREATE TABLE IF NOT EXISTS test_strategy (
+  strategy_id int(11) NOT NULL,
+  name varchar(50) NOT NULL,
+  referrer varchar(1024) DEFAULT NULL,
+  landing varchar(1024) DEFAULT NULL,
+  priority int(11) DEFAULT NULL,
+  implementation varchar(512) DEFAULT NULL,
+  last_modified timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  PRIMARY KEY (strategy_id)
+);
+
+
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (1,'S1','aaa','abc',1000,NULL,'2012-05-08 15:01:15');
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (2,'S2','bbb','def',990,NULL,'2012-05-08 15:01:15');
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (3,'S3','ccc','ghi',1000,NULL,'2012-05-08 15:01:15');
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (4,'S4','ddd','jkl',980,NULL,'2012-05-08 15:01:15');
+INSERT INTO test_strategy (strategy_id, name, referrer, landing, priority, implementation, last_modified) VALUES (5,'S5','eee',NULL,NULL,NULL,'2012-05-08 15:01:15');
+
+

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/packaging/pom.xml
----------------------------------------------------------------------
diff --git a/packaging/pom.xml b/packaging/pom.xml
index 2439e19..a128036 100644
--- a/packaging/pom.xml
+++ b/packaging/pom.xml
@@ -215,6 +215,11 @@
     </dependency>
     <dependency>
       <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc-handler</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
       <artifactId>hive-accumulo-handler</artifactId>
       <version>${project.version}</version>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/packaging/src/main/assembly/src.xml
----------------------------------------------------------------------
diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml
index e6af8b1..0529e90 100644
--- a/packaging/src/main/assembly/src.xml
+++ b/packaging/src/main/assembly/src.xml
@@ -69,6 +69,7 @@
         <include>dev-support/**/*</include>
         <include>docs/**/*</include>
         <include>druid-handler/**/*</include>
+        <include>jdbc-handler/**/*</include>
         <include>find-bugs/**/*</include>
         <include>hbase-handler/**/*</include>
         <include>hcatalog/**/*</include>

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3ddec7a..bfa66a1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -39,6 +39,7 @@
     <module>contrib</module>
     <module>druid-handler</module>
     <module>hbase-handler</module>
+    <module>jdbc-handler</module>
     <module>hcatalog</module>
     <module>hplsql</module>
     <module>jdbc</module>
@@ -137,8 +138,10 @@
     <druid.version>0.9.2</druid.version>
     <guava.version>14.0.1</guava.version>
     <groovy.version>2.4.4</groovy.version>
+    <h2database.version>1.3.166</h2database.version>
     <hadoop.version>2.7.2</hadoop.version>
     <hadoop.bin.path>${basedir}/${hive.path.to.root}/testutils/hadoop</hadoop.bin.path>
+    <hamcrest.version>1.1</hamcrest.version>
     <hbase.version>1.1.1</hbase.version>
    <!-- required for logging test to avoid including hbase which pulls disruptor transitively -->
     <disruptor.version>3.3.0</disruptor.version>

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/ql/src/test/queries/clientpositive/jdbc_handler.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/jdbc_handler.q b/ql/src/test/queries/clientpositive/jdbc_handler.q
new file mode 100644
index 0000000..2038617
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/jdbc_handler.q
@@ -0,0 +1,58 @@
+CREATE EXTERNAL TABLE tables
+(
+id int,
+db_id int,
+name STRING,
+type STRING,
+owner STRING
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
+"hive.sql.column.mapping" = "id=TBL_ID, db_id=DB_ID, name=TBL_NAME, type=TBL_TYPE, owner=OWNER",
+"hive.sql.dbcp.maxActive" = "1"
+);
+
+CREATE EXTERNAL TABLE dbs
+(
+id int,
+name STRING
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
+"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
+"hive.sql.dbcp.maxActive" = "1"
+);
+
+select tables.name as tn, dbs.name as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t;
+
+explain
+select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b;
+
+select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b;
+
+show tables;
+
+describe tables;

http://git-wip-us.apache.org/repos/asf/hive/blob/12b27a35/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
new file mode 100644
index 0000000..74bd60b
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -0,0 +1,303 @@
+PREHOOK: query: CREATE EXTERNAL TABLE tables
+(
+id int,
+db_id int,
+name STRING,
+type STRING,
+#### A masked pattern was here ####
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
+#### A masked pattern was here ####
+"hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tables
+POSTHOOK: query: CREATE EXTERNAL TABLE tables
+(
+id int,
+db_id int,
+name STRING,
+type STRING,
+#### A masked pattern was here ####
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT TBL_ID, DB_ID, TBL_NAME, TBL_TYPE, OWNER FROM TBLS",
+#### A masked pattern was here ####
+"hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tables
+PREHOOK: query: CREATE EXTERNAL TABLE dbs
+(
+id int,
+name STRING
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
+"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
+"hive.sql.dbcp.maxActive" = "1"
+)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@dbs
+POSTHOOK: query: CREATE EXTERNAL TABLE dbs
+(
+id int,
+name STRING
+)
+STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
+TBLPROPERTIES (
+"hive.sql.database.type" = "DERBY",
+"hive.sql.jdbc.url" = "jdbc:derby:;databaseName=${test.tmp.dir}/junit_metastore_db;create=true",
+"hive.sql.jdbc.driver" = "org.apache.derby.jdbc.EmbeddedDriver",
+"hive.sql.query" = "SELECT DB_ID, NAME FROM DBS",
+"hive.sql.column.mapping" = "id=DB_ID, name=NAME",
+"hive.sql.dbcp.maxActive" = "1"
+)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dbs
+PREHOOK: query: select tables.name as tn, dbs.name as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dbs
+PREHOOK: Input: default@tables
+#### A masked pattern was here ####
+POSTHOOK: query: select tables.name as tn, dbs.name as dn, tables.type as t
+from tables join dbs on (tables.db_id = dbs.id) order by tn, dn, t
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dbs
+POSTHOOK: Input: default@tables
+#### A masked pattern was here ####
+alltypesorc	default	MANAGED_TABLE
+cbo_t1	default	MANAGED_TABLE
+cbo_t2	default	MANAGED_TABLE
+cbo_t3	default	MANAGED_TABLE
+dbs	default	EXTERNAL_TABLE
+lineitem	default	MANAGED_TABLE
+part	default	MANAGED_TABLE
+src	default	MANAGED_TABLE
+src1	default	MANAGED_TABLE
+src_cbo	default	MANAGED_TABLE
+src_json	default	MANAGED_TABLE
+src_sequencefile	default	MANAGED_TABLE
+src_thrift	default	MANAGED_TABLE
+srcbucket	default	MANAGED_TABLE
+srcbucket2	default	MANAGED_TABLE
+srcpart	default	MANAGED_TABLE
+tables	default	EXTERNAL_TABLE
+PREHOOK: query: explain
+select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE), Reducer 5 (SIMPLE_EDGE)
+        Reducer 3 <- Reducer 2 (SIMPLE_EDGE)
+        Reducer 5 <- Map 4 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tables
+                  Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                  Select Operator
+                    expressions: name (type: string)
+                    outputColumnNames: _col1
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: 1.0 (type: double)
+                      sort order: +
+                      Map-reduce partition columns: 1.0 (type: double)
+                      Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                      value expressions: _col1 (type: string)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Map 4 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key - 1) is not null (type: boolean)
+                    Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                    Group By Operator
+                      keys: key (type: string)
+                      mode: hash
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                      Reduce Output Operator
+                        key expressions: _col0 (type: string)
+                        sort order: +
+                        Map-reduce partition columns: _col0 (type: string)
+                        Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Merge Join Operator
+                condition map:
+                     Inner Join 0 to 1
+                keys:
+                  0 1.0 (type: double)
+                  1 (_col0 - 1) (type: double)
+                outputColumnNames: _col1, _col2
+                Statistics: Num rows: 225 Data size: 19618 Basic stats: COMPLETE Column stats: NONE
+                Select Operator
+                  expressions: _col1 (type: string), _col2 (type: string)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 225 Data size: 19618 Basic stats: COMPLETE Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: string), _col1 (type: string)
+                    sort order: ++
+                    Statistics: Num rows: 225 Data size: 19618 Basic stats: COMPLETE Column stats: NONE
+        Reducer 3 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 225 Data size: 19618 Basic stats: COMPLETE Column stats: NONE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 225 Data size: 19618 Basic stats: COMPLETE Column stats: NONE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+        Reducer 5 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                keys: KEY._col0 (type: string)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                Reduce Output Operator
+                  key expressions: (_col0 - 1) (type: double)
+                  sort order: +
+                  Map-reduce partition columns: (_col0 - 1) (type: double)
+                  Statistics: Num rows: 205 Data size: 17835 Basic stats: COMPLETE Column stats: COMPLETE
+                  value expressions: _col0 (type: string)
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Input: default@tables
+#### A masked pattern was here ####
+POSTHOOK: query: select
+  t1.name as a, t2.key as b
+from
+  (select 1 as db_id, tables.name from tables) t1
+  join
+  (select distinct key from src) t2
+  on (t2.key-1) = t1.db_id
+order by a,b
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Input: default@tables
+#### A masked pattern was here ####
+alltypesorc	2
+cbo_t1	2
+cbo_t2	2
+cbo_t3	2
+dbs	2
+lineitem	2
+part	2
+src	2
+src1	2
+src_cbo	2
+src_json	2
+src_sequencefile	2
+src_thrift	2
+srcbucket	2
+srcbucket2	2
+srcpart	2
+tables	2
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+PREHOOK: Input: database:default
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+POSTHOOK: Input: database:default
+alltypesorc
+cbo_t1
+cbo_t2
+cbo_t3
+dbs
+lineitem
+part
+src
+src1
+src_cbo
+src_json
+src_sequencefile
+src_thrift
+srcbucket
+srcbucket2
+srcpart
+tables
+PREHOOK: query: describe tables
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@tables
+POSTHOOK: query: describe tables
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@tables
+id                  	string              	from deserializer   
+db_id               	string              	from deserializer   
+name                	string              	from deserializer   
+type                	string              	from deserializer   
+#### A masked pattern was here ####


Mime
View raw message