From: weiz@apache.org
To: commits@hive.apache.org
Date: Fri, 05 May 2017 17:31:57 -0000
Subject: [10/51] [partial] hive git commit: HIVE-14671 : merge master into hive-14535 (Wei Zheng)

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
index d6460cd..df05af1 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/DummyRawStoreForJdoConnection.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
+import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
@@ -892,6 +893,13 @@ public class DummyRawStoreForJdoConnection implements RawStore {
   }
 
   @Override
+  public Map<String, List<ColumnStatisticsObj>> getAggrColStatsForTablePartitions(String dbName,
+      String tableName) throws MetaException, NoSuchObjectException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
   public void createTableWrite(Table tbl, long writeId, char state, long heartbeat) {
   }
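The stub above implements the new RawStore method that fetches aggregated column statistics for all partitions of a table in one call. A minimal caller sketch, assuming the result map is keyed by partition name (the keying is not visible in this stub-only hunk, and the database/table names below are placeholders):

import java.util.List;
import java.util.Map;

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;

public class AggrColStatsSketch {
  // Print how many column-stat objects each partition contributes.
  // "default" and "sales" are placeholder names for illustration only.
  static void printPartitionStats(RawStore store) throws Exception {
    Map<String, List<ColumnStatisticsObj>> statsByPartition =
        store.getAggrColStatsForTablePartitions("default", "sales");
    for (Map.Entry<String, List<ColumnStatisticsObj>> e : statsByPartition.entrySet()) {
      System.out.println(e.getKey() + " -> " + e.getValue().size() + " column stats");
    }
  }
}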
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
index f8eed18..2166c20 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
@@ -42,7 +42,6 @@ public class TestHiveMetaStoreTimeout {
   public static void setUp() throws Exception {
     HiveMetaStore.TEST_TIMEOUT_ENABLED = true;
     hiveConf = new HiveConf(TestHiveMetaStoreTimeout.class);
-    hiveConf.setBoolean(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, true);
     hiveConf.set(HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS.varname,
         MockPartitionExpressionForMetastore.class.getCanonicalName());
     hiveConf.setTimeVar(HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT, 10 * 1000,

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
index aaa03fb..d008c75 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/TestObjectStore.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.common.metrics.metrics2.MetricsReporting;
 import org.apache.hadoop.hive.common.metrics.MetricsTestUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
@@ -41,6 +42,9 @@ import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
+import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.PrincipalType;
@@ -51,6 +55,7 @@ import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
+import org.apache.hadoop.hive.metastore.messaging.EventMessage;
 import org.apache.hadoop.hive.metastore.model.MTableWrite;
 import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
 import org.apache.hadoop.hive.serde.serdeConstants;
@@ -62,10 +67,12 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Supplier;
+import javax.jdo.Query;
 
 public class TestObjectStore {
   private ObjectStore objectStore = null;
@@ -135,6 +142,56 @@ public class TestObjectStore {
   }
 
   /**
+   * Test notification operations
+   */
+  @Test
+  public void testNotificationOps() throws InterruptedException {
+    final int NO_EVENT_ID = 0;
+    final int FIRST_EVENT_ID = 1;
+    final int SECOND_EVENT_ID = 2;
+
+    NotificationEvent event =
+        new NotificationEvent(0, 0, EventMessage.EventType.CREATE_DATABASE.toString(), "");
+    NotificationEventResponse eventResponse;
+    CurrentNotificationEventId eventId;
+
+    // Verify that there are no notifications available yet
+    eventId = objectStore.getCurrentNotificationEventId();
+    Assert.assertEquals(NO_EVENT_ID, eventId.getEventId());
+
+    // Verify that addNotificationEvent() updates the NotificationEvent with the new event ID
+    objectStore.addNotificationEvent(event);
+    Assert.assertEquals(FIRST_EVENT_ID, event.getEventId());
+    objectStore.addNotificationEvent(event);
+    Assert.assertEquals(SECOND_EVENT_ID, event.getEventId());
+
+    // Verify that objectStore fetches the latest notification event ID
+    eventId = objectStore.getCurrentNotificationEventId();
+    Assert.assertEquals(SECOND_EVENT_ID, eventId.getEventId());
+
+    // Verify that getNextNotification() returns all events
+    eventResponse = objectStore.getNextNotification(new NotificationEventRequest());
+    Assert.assertEquals(2, eventResponse.getEventsSize());
+    Assert.assertEquals(FIRST_EVENT_ID, eventResponse.getEvents().get(0).getEventId());
+    Assert.assertEquals(SECOND_EVENT_ID, eventResponse.getEvents().get(1).getEventId());
+
+    // Verify that getNextNotification(last) returns events after a specified event
+    eventResponse = objectStore.getNextNotification(new NotificationEventRequest(FIRST_EVENT_ID));
+    Assert.assertEquals(1, eventResponse.getEventsSize());
+    Assert.assertEquals(SECOND_EVENT_ID, eventResponse.getEvents().get(0).getEventId());
+
+    // Verify that getNextNotification(last) returns zero events if there are no more notifications available
+    eventResponse = objectStore.getNextNotification(new NotificationEventRequest(SECOND_EVENT_ID));
+    Assert.assertEquals(0, eventResponse.getEventsSize());
+
+    // Verify that cleanNotificationEvents() cleans up all old notifications
+    Thread.sleep(1);
+    objectStore.cleanNotificationEvents(1);
+    eventResponse = objectStore.getNextNotification(new NotificationEventRequest());
+    Assert.assertEquals(0, eventResponse.getEventsSize());
+  }
+
+  /**
    * Test database operations
    */
   @Test
@@ -525,4 +582,15 @@ public class TestObjectStore {
     } catch (NoSuchObjectException e) {
     }
   }
+
+  @Test
+  public void testQueryCloseOnError() throws Exception {
+    ObjectStore spy = Mockito.spy(objectStore);
+    spy.getAllDatabases();
+    spy.getAllFunctions();
+    spy.getAllTables(DB1);
+    spy.getPartitionCount();
+    Mockito.verify(spy, Mockito.times(3))
+        .rollbackAndCleanup(Mockito.anyBoolean(), Mockito.anyObject());
+  }
 }
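The new test pins down the contract a notification consumer relies on: event IDs are assigned monotonically starting at 1, and getNextNotification(new NotificationEventRequest(lastId)) returns only events newer than lastId. A minimal polling sketch built on that contract (the poller class itself is hypothetical; only the RawStore calls come from the test above):

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;

public class NotificationPoller {
  private long lastSeenId = 0; // 0 means "no events consumed yet", per the test

  // Fetch everything newer than lastSeenId, then advance the cursor.
  void poll(RawStore store) throws Exception {
    NotificationEventResponse resp =
        store.getNextNotification(new NotificationEventRequest(lastSeenId));
    for (NotificationEvent e : resp.getEvents()) {
      System.out.println("event " + e.getEventId() + ": " + e.getEventType());
      lastSeenId = e.getEventId();
    }
  }
}

Because IDs are monotonic, persisting lastSeenId is enough to resume consumption after a restart without re-processing events.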
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
index 9acf9d7..a8c7ac3 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/VerifyingObjectStore.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.thrift.TException;
 
-class VerifyingObjectStore extends ObjectStore {
+public class VerifyingObjectStore extends ObjectStore {
   private static final Logger LOG = LoggerFactory.getLogger(VerifyingObjectStore.class);
 
   public VerifyingObjectStore() {

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java b/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
new file mode 100644
index 0000000..0ab20d6
--- /dev/null
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/cache/TestCachedStore.java
@@ -0,0 +1,238 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.cache;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.TestObjectStore.MockPartitionExpressionProxy;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestCachedStore {
+
+  private CachedStore cachedStore = new CachedStore();
+
+  @Before
+  public void setUp() throws Exception {
+    HiveConf conf = new HiveConf();
+    conf.setVar(HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS, MockPartitionExpressionProxy.class.getName());
+
+    ObjectStore objectStore = new ObjectStore();
+    objectStore.setConf(conf);
+
+    cachedStore.setRawStore(objectStore);
+
+    SharedCache.getDatabaseCache().clear();
+    SharedCache.getTableCache().clear();
+    SharedCache.getPartitionCache().clear();
+    SharedCache.getSdCache().clear();
+    SharedCache.getPartitionColStatsCache().clear();
+  }
+
+  @Test
+  public void testSharedStoreDb() {
+    Database db1 = new Database();
+    Database db2 = new Database();
+    Database db3 = new Database();
+    Database newDb1 = new Database();
+    newDb1.setName("db1");
+
+    SharedCache.addDatabaseToCache("db1", db1);
+    SharedCache.addDatabaseToCache("db2", db2);
+    SharedCache.addDatabaseToCache("db3", db3);
+
+    Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 3);
+
+    SharedCache.alterDatabaseInCache("db1", newDb1);
+
+    Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 3);
+
+    SharedCache.removeDatabaseFromCache("db2");
+
+    Assert.assertEquals(SharedCache.getCachedDatabaseCount(), 2);
+
+    List<String> dbs = SharedCache.listCachedDatabases();
+    Assert.assertEquals(dbs.size(), 2);
+    Assert.assertTrue(dbs.contains("db1"));
+    Assert.assertTrue(dbs.contains("db3"));
+  }
+
+  @Test
+  public void testSharedStoreTable() {
+    Table tbl1 = new Table();
+    StorageDescriptor sd1 = new StorageDescriptor();
+    List<FieldSchema> cols1 = new ArrayList<FieldSchema>();
+    cols1.add(new FieldSchema("col1", "int", ""));
+    Map<String, String> params1 = new HashMap<String, String>();
+    params1.put("key", "value");
+    sd1.setCols(cols1);
+    sd1.setParameters(params1);
+    sd1.setLocation("loc1");
+    tbl1.setSd(sd1);
+    tbl1.setPartitionKeys(new ArrayList<FieldSchema>());
+
+    Table tbl2 = new Table();
+    StorageDescriptor sd2 = new StorageDescriptor();
+    List<FieldSchema> cols2 = new ArrayList<FieldSchema>();
+    cols2.add(new FieldSchema("col1", "int", ""));
+    Map<String, String> params2 = new HashMap<String, String>();
+    params2.put("key", "value");
+    sd2.setCols(cols2);
+    sd2.setParameters(params2);
+    sd2.setLocation("loc2");
+    tbl2.setSd(sd2);
+    tbl2.setPartitionKeys(new ArrayList<FieldSchema>());
+
+    Table tbl3 = new Table();
+    StorageDescriptor sd3 = new StorageDescriptor();
+    List<FieldSchema> cols3 = new ArrayList<FieldSchema>();
+    cols3.add(new FieldSchema("col3", "int", ""));
+    Map<String, String> params3 = new HashMap<String, String>();
+    params3.put("key2", "value2");
+    sd3.setCols(cols3);
+    sd3.setParameters(params3);
+    sd3.setLocation("loc3");
+    tbl3.setSd(sd3);
+    tbl3.setPartitionKeys(new ArrayList<FieldSchema>());
+
+    Table newTbl1 = new Table();
+    newTbl1.setDbName("db2");
+    newTbl1.setTableName("tbl1");
+    StorageDescriptor newSd1 = new StorageDescriptor();
+    List<FieldSchema> newCols1 = new ArrayList<FieldSchema>();
+    newCols1.add(new FieldSchema("newcol1", "int", ""));
+    Map<String, String> newParams1 = new HashMap<String, String>();
+    newParams1.put("key", "value");
+    newSd1.setCols(newCols1);
+    newSd1.setParameters(params1);
+    newSd1.setLocation("loc1");
+    newTbl1.setSd(newSd1);
+    newTbl1.setPartitionKeys(new ArrayList<FieldSchema>());
+
+    SharedCache.addTableToCache("db1", "tbl1", tbl1);
+    SharedCache.addTableToCache("db1", "tbl2", tbl2);
+    SharedCache.addTableToCache("db1", "tbl3", tbl3);
+    SharedCache.addTableToCache("db2", "tbl1", tbl1);
+
+    Assert.assertEquals(SharedCache.getCachedTableCount(), 4);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+
+    Table t = SharedCache.getTableFromCache("db1", "tbl1");
+    Assert.assertEquals(t.getSd().getLocation(), "loc1");
+
+    SharedCache.removeTableFromCache("db1", "tbl1");
+    Assert.assertEquals(SharedCache.getCachedTableCount(), 3);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+
+    SharedCache.alterTableInCache("db2", "tbl1", newTbl1);
+    Assert.assertEquals(SharedCache.getCachedTableCount(), 3);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 3);
+
+    SharedCache.removeTableFromCache("db1", "tbl2");
+    Assert.assertEquals(SharedCache.getCachedTableCount(), 2);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+  }
+
+  @Test
+  public void testSharedStorePartition() {
+    Partition part1 = new Partition();
+    StorageDescriptor sd1 = new StorageDescriptor();
+    List<FieldSchema> cols1 = new ArrayList<FieldSchema>();
+    cols1.add(new FieldSchema("col1", "int", ""));
+    Map<String, String> params1 = new HashMap<String, String>();
+    params1.put("key", "value");
+    sd1.setCols(cols1);
+    sd1.setParameters(params1);
+    sd1.setLocation("loc1");
+    part1.setSd(sd1);
+    part1.setValues(Arrays.asList("201701"));
+
+    Partition part2 = new Partition();
+    StorageDescriptor sd2 = new StorageDescriptor();
+    List<FieldSchema> cols2 = new ArrayList<FieldSchema>();
+    cols2.add(new FieldSchema("col1", "int", ""));
+    Map<String, String> params2 = new HashMap<String, String>();
+    params2.put("key", "value");
+    sd2.setCols(cols2);
+    sd2.setParameters(params2);
+    sd2.setLocation("loc2");
+    part2.setSd(sd2);
+    part2.setValues(Arrays.asList("201702"));
+
+    Partition part3 = new Partition();
+    StorageDescriptor sd3 = new StorageDescriptor();
+    List<FieldSchema> cols3 = new ArrayList<FieldSchema>();
+    cols3.add(new FieldSchema("col3", "int", ""));
+    Map<String, String> params3 = new HashMap<String, String>();
+    params3.put("key2", "value2");
+    sd3.setCols(cols3);
+    sd3.setParameters(params3);
+    sd3.setLocation("loc3");
+    part3.setSd(sd3);
+    part3.setValues(Arrays.asList("201703"));
+
+    Partition newPart1 = new Partition();
+    newPart1.setDbName("db1");
+    newPart1.setTableName("tbl1");
+    StorageDescriptor newSd1 = new StorageDescriptor();
+    List<FieldSchema> newCols1 = new ArrayList<FieldSchema>();
+    newCols1.add(new FieldSchema("newcol1", "int", ""));
+    Map<String, String> newParams1 = new HashMap<String, String>();
+    newParams1.put("key", "value");
+    newSd1.setCols(newCols1);
+    newSd1.setParameters(params1);
+    newSd1.setLocation("loc1");
+    newPart1.setSd(newSd1);
+    newPart1.setValues(Arrays.asList("201701"));
+
+    SharedCache.addPartitionToCache("db1", "tbl1", part1);
+    SharedCache.addPartitionToCache("db1", "tbl1", part2);
+    SharedCache.addPartitionToCache("db1", "tbl1", part3);
+    SharedCache.addPartitionToCache("db1", "tbl2", part1);
+
+    Assert.assertEquals(SharedCache.getCachedPartitionCount(), 4);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+
+    Partition t = SharedCache.getPartitionFromCache("db1", "tbl1", Arrays.asList("201701"));
+    Assert.assertEquals(t.getSd().getLocation(), "loc1");
+
+    SharedCache.removePartitionFromCache("db1", "tbl2", Arrays.asList("201701"));
+    Assert.assertEquals(SharedCache.getCachedPartitionCount(), 3);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+
+    SharedCache.alterPartitionInCache("db1", "tbl1", Arrays.asList("201701"), newPart1);
+    Assert.assertEquals(SharedCache.getCachedPartitionCount(), 3);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 3);
+
+    SharedCache.removePartitionFromCache("db1", "tbl1", Arrays.asList("201702"));
+    Assert.assertEquals(SharedCache.getCachedPartitionCount(), 2);
+    Assert.assertEquals(SharedCache.getSdCache().size(), 2);
+  }
+}
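A detail worth noting in these assertions: SharedCache stores each distinct StorageDescriptor once and shares it, which is why four cached tables backed by two distinct descriptors leave getSdCache().size() at 2, and altering one table to a new descriptor grows it to 3. A simplified sketch of that interning idea, assuming an equals()-based key and reference counting (how the real SharedCache keys its entries is not shown in this diff):

import java.util.HashMap;
import java.util.Map;

public class RefCountingInterner<T> {
  private static class Entry<T> {
    final T value;
    int refs;
    Entry(T v) { value = v; }
  }

  private final Map<T, Entry<T>> cache = new HashMap<T, Entry<T>>();

  // Return the canonical instance, bumping its reference count.
  public synchronized T intern(T value) {
    Entry<T> e = cache.computeIfAbsent(value, Entry::new);
    e.refs++;
    return e.value;
  }

  // Drop one reference; evict the entry when nothing points at it.
  public synchronized void release(T value) {
    Entry<T> e = cache.get(value);
    if (e != null && --e.refs == 0) {
      cache.remove(value);
    }
  }

  public synchronized int size() {
    return cache.size();
  }
}

With such an interner, adding four equal descriptors yields size() == 1, mirroring the sd-cache counts asserted above, and the memory cost of a hot metadata cache stays proportional to the number of distinct descriptors rather than the number of tables and partitions.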
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java b/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
new file mode 100644
index 0000000..c278338
--- /dev/null
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/messaging/json/JSONMessageDeserializerTest.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.messaging.json;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+import org.json.JSONException;
+import org.junit.Test;
+import org.skyscreamer.jsonassert.JSONAssert;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+public class JSONMessageDeserializerTest {
+
+  public static class MyClass {
+    @JsonProperty
+    private int a;
+    @JsonProperty
+    private Map<String, String> map;
+    private long l;
+    private String shouldNotSerialize = "shouldNotSerialize";
+
+    //for jackson to instantiate
+    MyClass() {
+    }
+
+    MyClass(int a, Map<String, String> map, long l) {
+      this.a = a;
+      this.map = map;
+      this.l = l;
+    }
+
+    @JsonProperty
+    long getL() {
+      return l;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+      if (this == o)
+        return true;
+      if (o == null || getClass() != o.getClass())
+        return false;
+
+      MyClass myClass = (MyClass) o;
+
+      if (a != myClass.a)
+        return false;
+      if (l != myClass.l)
+        return false;
+      if (!map.equals(myClass.map))
+        return false;
+      return shouldNotSerialize.equals(myClass.shouldNotSerialize);
+    }
+
+    @Override
+    public int hashCode() {
+      int result = a;
+      result = 31 * result + map.hashCode();
+      result = 31 * result + (int) (l ^ (l >>> 32));
+      result = 31 * result + shouldNotSerialize.hashCode();
+      return result;
+    }
+  }
+
+  @Test
+  public void shouldNotSerializePropertiesNotAnnotated() throws IOException, JSONException {
+    MyClass obj = new MyClass(Integer.MAX_VALUE, new HashMap<String, String>() {{
+      put("a", "a");
+      put("b", "b");
+    }}, Long.MAX_VALUE);
+    String json = JSONMessageDeserializer.mapper.writeValueAsString(obj);
+    JSONAssert.assertEquals(
+        "{\"a\":2147483647,\"map\":{\"b\":\"b\",\"a\":\"a\"},\"l\":9223372036854775807}", json,
+        false);
+  }
+
+  @Test
+  public void shouldDeserializeJsonStringToObject() throws IOException {
+    String json = "{\"a\":47,\"map\":{\"a\":\"a\",\"b\":\"a value for b\"},\"l\":98}";
+    MyClass actual = JSONMessageDeserializer.mapper.readValue(json, MyClass.class);
+    MyClass expected = new MyClass(47, new HashMap<String, String>() {{
+      put("a", "a");
+      put("b", "a value for b");
+    }}, 98L);
+    assertEquals(expected, actual);
+  }
+}
\ No newline at end of file
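The test asserts that only @JsonProperty-annotated members round-trip, while the unannotated shouldNotSerialize field is skipped. How JSONMessageDeserializer.mapper is configured is not shown in this diff; the standalone Jackson 1.x sketch below reproduces the asserted behavior by turning off member auto-detection so that only annotated members are picked up:

import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;

public class AnnotatedOnlyDemo {
  public static class Point {
    @org.codehaus.jackson.annotate.JsonProperty
    private int x;          // serialized: explicitly annotated
    private int secret = 7; // not serialized: no annotation, auto-detection off

    Point() {}
    Point(int x) { this.x = x; }
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // With auto-detection disabled, unannotated getters/fields are ignored;
    // explicitly annotated members remain visible even when private.
    mapper.configure(SerializationConfig.Feature.AUTO_DETECT_GETTERS, false);
    mapper.configure(SerializationConfig.Feature.AUTO_DETECT_IS_GETTERS, false);
    mapper.configure(SerializationConfig.Feature.AUTO_DETECT_FIELDS, false);
    System.out.println(mapper.writeValueAsString(new Point(42))); // {"x":42}
  }
}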
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
index 79ccc6b..ec653ed 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestValidCompactorTxnList.java
@@ -22,52 +22,64 @@ import org.apache.hadoop.hive.common.ValidTxnList;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.util.BitSet;
+
 public class TestValidCompactorTxnList {
 
   @Test
   public void minTxnHigh() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 4}, 2);
+    BitSet bitSet = new BitSet(2);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 4}, bitSet, 2);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
   }
 
   @Test
   public void maxTxnLow() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{13, 14}, 12);
+    BitSet bitSet = new BitSet(2);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{13, 14}, bitSet, 12);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
   }
 
   @Test
   public void minTxnHighNoExceptions() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[0], 5);
+    ValidTxnList txns = new ValidCompactorTxnList(new long[0], new BitSet(), 5);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
   }
 
   @Test
   public void maxTxnLowNoExceptions() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[0], 15);
+    ValidTxnList txns = new ValidCompactorTxnList(new long[0], new BitSet(), 15);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
   }
 
   @Test
   public void exceptionsAllBelow() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 6}, 3);
+    BitSet bitSet = new BitSet(2);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{3, 6}, bitSet, 3);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
   }
 
   @Test
   public void exceptionsInMidst() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{8}, 7);
+    BitSet bitSet = new BitSet(1);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{8}, bitSet, 7);
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
   }
 
   @Test
   public void exceptionsAbveHighWaterMark() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{8, 11, 17, 29}, 15);
+    BitSet bitSet = new BitSet(4);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{8, 11, 17, 29}, bitSet, 15);
     Assert.assertArrayEquals("", new long[]{8, 11}, txns.getInvalidTransactions());
     ValidTxnList.RangeResponse rsp = txns.isTxnRangeValid(7, 9);
     Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
@@ -77,17 +89,19 @@ public class TestValidCompactorTxnList {
 
   @Test
   public void writeToString() {
-    ValidTxnList txns = new ValidCompactorTxnList(new long[]{9, 7, 10, Long.MAX_VALUE}, 8);
-    Assert.assertEquals("8:" + Long.MAX_VALUE + ":7", txns.writeToString());
+    BitSet bitSet = new BitSet(4);
+    bitSet.set(0, bitSet.length());
+    ValidTxnList txns = new ValidCompactorTxnList(new long[]{7, 9, 10, Long.MAX_VALUE}, bitSet, 8);
+    Assert.assertEquals("8:" + Long.MAX_VALUE + ":7:", txns.writeToString());
     txns = new ValidCompactorTxnList();
-    Assert.assertEquals(Long.toString(Long.MAX_VALUE) + ":" + Long.MAX_VALUE + ":", txns.writeToString());
-    txns = new ValidCompactorTxnList(new long[0], 23);
-    Assert.assertEquals("23:" + Long.MAX_VALUE + ":", txns.writeToString());
+    Assert.assertEquals(Long.toString(Long.MAX_VALUE) + ":" + Long.MAX_VALUE + "::", txns.writeToString());
+    txns = new ValidCompactorTxnList(new long[0], new BitSet(), 23);
+    Assert.assertEquals("23:" + Long.MAX_VALUE + "::", txns.writeToString());
   }
 
   @Test
   public void readFromString() {
-    ValidCompactorTxnList txns = new ValidCompactorTxnList("37:" + Long.MAX_VALUE + ":7:9:10");
+    ValidCompactorTxnList txns = new ValidCompactorTxnList("37:" + Long.MAX_VALUE + "::7,9,10");
     Assert.assertEquals(37L, txns.getHighWatermark());
     Assert.assertEquals(Long.MAX_VALUE, txns.getMinOpenTxn());
     Assert.assertArrayEquals(new long[]{7L, 9L, 10L}, txns.getInvalidTransactions());
@@ -96,4 +110,27 @@ public class TestValidCompactorTxnList {
     Assert.assertEquals(Long.MAX_VALUE, txns.getMinOpenTxn());
     Assert.assertEquals(0, txns.getInvalidTransactions().length);
   }
+
+  @Test
+  public void testAbortedTxn() throws Exception {
+    ValidCompactorTxnList txnList = new ValidCompactorTxnList("5:4::1,2,3");
+    Assert.assertEquals(5L, txnList.getHighWatermark());
+    Assert.assertEquals(4, txnList.getMinOpenTxn());
+    Assert.assertArrayEquals(new long[]{1L, 2L, 3L}, txnList.getInvalidTransactions());
+  }
+
+  @Test
+  public void testAbortedRange() throws Exception {
+    ValidCompactorTxnList txnList = new ValidCompactorTxnList("11:4::5,6,7,8");
+    ValidTxnList.RangeResponse rsp = txnList.isTxnRangeAborted(1L, 3L);
+    Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
+    rsp = txnList.isTxnRangeAborted(9L, 10L);
+    Assert.assertEquals(ValidTxnList.RangeResponse.NONE, rsp);
+    rsp = txnList.isTxnRangeAborted(6L, 7L);
+    Assert.assertEquals(ValidTxnList.RangeResponse.ALL, rsp);
+    rsp = txnList.isTxnRangeAborted(4L, 6L);
+    Assert.assertEquals(ValidTxnList.RangeResponse.SOME, rsp);
+    rsp = txnList.isTxnRangeAborted(6L, 13L);
+    Assert.assertEquals(ValidTxnList.RangeResponse.SOME, rsp);
+  }
 }
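The readFromString() and testAborted* cases above imply a four-field, colon-separated serialization: high-watermark, minimum open transaction ID, then two comma-separated transaction ID lists. A standalone parsing sketch of that layout (the field names are inferred from the assertions; the write-side layout for compactor lists is not fully pinned down by these tests):

import java.util.Arrays;

public class TxnListStringDemo {
  public static void main(String[] args) {
    String serialized = "11:4::5,6,7,8"; // string taken from testAbortedRange() above
    String[] fields = serialized.split(":", -1); // -1 keeps empty fields
    long highWatermark = Long.parseLong(fields[0]);
    long minOpenTxn = Long.parseLong(fields[1]);
    long[] openTxns = parseCsv(fields[2]);    // inferred meaning of field 3
    long[] abortedTxns = parseCsv(fields[3]); // inferred meaning of field 4
    System.out.println(highWatermark + " " + minOpenTxn + " "
        + Arrays.toString(openTxns) + " " + Arrays.toString(abortedTxns));
  }

  private static long[] parseCsv(String csv) {
    if (csv.isEmpty()) {
      return new long[0];
    }
    return Arrays.stream(csv.split(",")).mapToLong(Long::parseLong).toArray();
  }
}

Switching the exception lists from colon-separated to comma-separated (old "37:MAX:7:9:10" vs. new "37:MAX::7,9,10") is what frees up the extra colon-delimited field to distinguish open from aborted transactions.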
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/packaging/pom.xml
----------------------------------------------------------------------
diff --git a/packaging/pom.xml b/packaging/pom.xml
index a128036..beddd1c 100644
--- a/packaging/pom.xml
+++ b/packaging/pom.xml
@@ -19,7 +19,7 @@
   <parent>
     <groupId>org.apache.hive</groupId>
     <artifactId>hive</artifactId>
-    <version>2.2.0-SNAPSHOT</version>
+    <version>3.0.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
   </parent>

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/packaging/src/main/assembly/src.xml
----------------------------------------------------------------------
diff --git a/packaging/src/main/assembly/src.xml b/packaging/src/main/assembly/src.xml
index 0529e90..8626922 100644
--- a/packaging/src/main/assembly/src.xml
+++ b/packaging/src/main/assembly/src.xml
@@ -67,7 +67,6 @@
         <include>contrib/**/*</include>
         <include>data/**/*</include>
        <include>dev-support/**/*</include>
-        <include>docs/**/*</include>
         <include>druid-handler/**/*</include>
         <include>jdbc-handler/**/*</include>
         <include>find-bugs/**/*</include>
@@ -97,6 +96,7 @@
         <include>spark-client/**/*</include>
         <include>storage-api/**/*</include>
         <include>testutils/**/*</include>
+        <include>vector-code-gen/**/*</include>
       </includes>
       <outputDirectory>/</outputDirectory>

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3ea3c77..e0aae27 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,11 +17,11 @@
   <parent>
     <groupId>org.apache</groupId>
     <artifactId>apache</artifactId>
-    <version>14</version>
+    <version>18</version>
   </parent>
   <groupId>org.apache.hive</groupId>
   <artifactId>hive</artifactId>
-  <version>2.2.0-SNAPSHOT</version>
+  <version>3.0.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>Hive</name>

(The property hunks below lost their XML element names in the archive; only the values survive.)

@@ -61,10 +61,12 @@
-    2.2.0
+    3.0.0
     UTF-8
+    1.8
+    1.8
     false
     ${settings.localRepository}
     .
@@ -90,11 +92,11 @@
     1.0b3
     3.3.0-release
-    -Xmx1024m -XX:MaxPermSize=256M
+    -Xmx1024m
     1.7
     2.3
     2.12.1
-    3.1
+    3.6.1
     1.3.1
     2.4
     2.4
@@ -113,10 +115,10 @@
     3.5.2
     1.5.6
     0.1
-    1.8.0
+    1.9.0
     1.7.7
     0.8.0.RELEASE
-    1.10.0
+    1.12.0
     4.2.4
     4.1.17
     4.1.19
@@ -139,16 +141,16 @@
     14.0.1
     2.4.4
     1.3.166
-    2.7.2
+    2.8.0
     ${basedir}/${hive.path.to.root}/testutils/hadoop
     1.1
     1.1.1
     3.3.0
-    2.5.1
+    2.6.1
-    4.4
-    4.4
+    4.5.2
+    4.4.4
     2.4.0
     1.9.13
@@ -157,10 +159,10 @@
     2.3.4
     2.3.1
     0.3.2
-    3.0.0.v201112011016
+    3.1.0
     5.5.1
     3.0.1
-    7.6.0.v20120127
+    9.3.8.v20160314
     1.14
     2.22.2
@@ -175,7 +177,7 @@
     0.9.3
     2.6.2
     2.3
-    1.3.1
+    1.3.3
     1.9.5
     2.0.0-M5
     4.0.29.Final
@@ -185,9 +187,9 @@
     1.0.1
     1.7.10
     4.0.4
-    2.3.0-SNAPSHOT
+    3.0.0-SNAPSHOT
     0.8.4
-    0.90.2-incubating
+    0.92.0-incubating
     2.2.0
     2.0.0
     2.11
@@ -608,11 +610,31 @@
        <version>${jackson.version}</version>
      </dependency>
      <dependency>
-       <groupId>org.eclipse.jetty.aggregate</groupId>
-       <artifactId>jetty-all-server</artifactId>
+       <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-rewrite</artifactId>
        <version>${jetty.version}</version>
      </dependency>
+     <dependency>
+       <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-server</artifactId>
+       <version>${jetty.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-servlet</artifactId>
+       <version>${jetty.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.eclipse.jetty</groupId>
+       <artifactId>jetty-webapp</artifactId>
+       <version>${jetty.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>javax.servlet</groupId>
+       <artifactId>javax.servlet-api</artifactId>
+       <version>${javax-servlet.version}</version>
+     </dependency>
      <dependency>
        <groupId>org.datanucleus</groupId>
        <artifactId>datanucleus-api-jdo</artifactId>
        <version>${datanucleus-api-jdo.version}</version>
@@ -662,13 +684,24 @@
            <artifactId>commons-logging</artifactId>
          </exclusion>
        </exclusions>
-     </dependency>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-auth</artifactId>
+       <version>${hadoop.version}</version>
+       <exclusions>
+         <exclusion>
+           <groupId>commmons-logging</groupId>
+           <artifactId>commons-logging</artifactId>
+         </exclusion>
+       </exclusions>
+     </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
-       <exclusions>
+       <exclusions>
          <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
@@ -708,7 +741,7 @@
      <dependency>
        <groupId>org.apache.hadoop</groupId>
-       <artifactId>hadoop-mapreduce-client-core</artifactId>
+       <artifactId>hadoop-mapreduce-client-common</artifactId>
        <version>${hadoop.version}</version>
@@ -723,10 +756,55 @@
      <dependency>
        <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-mapreduce-client-core</artifactId>
+       <version>${hadoop.version}</version>
+       <exclusions>
+         <exclusion>
+           <groupId>org.slf4j</groupId>
+           <artifactId>slf4j-log4j12</artifactId>
+         </exclusion>
+         <exclusion>
+           <groupId>commmons-logging</groupId>
+           <artifactId>commons-logging</artifactId>
+         </exclusion>
+       </exclusions>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-minikdc</artifactId>
        <version>${hadoop.version}</version>
      </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-api</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-client</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-common</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-registry</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-server-web-common</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
+     <dependency>
+       <groupId>org.apache.hadoop</groupId>
+       <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+       <version>${hadoop.version}</version>
+     </dependency>
      <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-common</artifactId>
        <version>${hbase.version}</version>
@@ -803,10 +881,6 @@
        <plugin>
          <groupId>org.apache.maven.plugins</groupId>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>${maven.compiler.plugin.version}</version>
-         <configuration>
-           <source>1.7</source>
-           <target>1.7</target>
-         </configuration>
        </plugin>
@@ -1106,12 +1180,12 @@
          <version>0.10</version>
+         <exclude>binary-package-licenses/**</exclude>
          <exclude>data/**</exclude>
          <exclude>conf/**</exclude>
          <exclude>checkstyle/**</exclude>
          <exclude>bin/**</exclude>
          <exclude>itests/**</exclude>
-         <exclude>docs/**</exclude>
          <exclude>**/README.md</exclude>
          <exclude>**/*.iml</exclude>
          <exclude>**/*.txt</exclude>

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 7db0ede..40a216b 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -19,7 +19,7 @@
   <parent>
     <groupId>org.apache.hive</groupId>
     <artifactId>hive</artifactId>
-    <version>2.2.0-SNAPSHOT</version>
+    <version>3.0.0-SNAPSHOT</version>
     <relativePath>../pom.xml</relativePath>
   </parent>
@@ -229,6 +229,13 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-registry</artifactId>
+      <version>${hadoop.version}</version>
+      <optional>true</optional>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <version>${hadoop.version}</version>
      <optional>true</optional>
@@ -379,12 +386,22 @@
        <exclusion>
          <groupId>com.fasterxml.jackson.core</groupId>
          <artifactId>jackson-core</artifactId>
        </exclusion>
+       <exclusion>
+         <groupId>org.apache.calcite.avatica</groupId>
+         <artifactId>avatica-core</artifactId>
+       </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.calcite</groupId>
      <artifactId>calcite-druid</artifactId>
      <version>${calcite.version}</version>
+     <exclusions>
+       <exclusion>
+         <groupId>org.apache.calcite.avatica</groupId>
+         <artifactId>avatica-core</artifactId>
+       </exclusion>
+     </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.calcite.avatica</groupId>
@@ -712,6 +729,12 @@
      <version>${glassfish.jersey.version}</version>
      <scope>test</scope>
    </dependency>
+   <dependency>
+     <groupId>org.hamcrest</groupId>
+     <artifactId>hamcrest-all</artifactId>
+     <version>${hamcrest.version}</version>
+     <scope>test</scope>
+   </dependency>
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
index 4393c3b..46cbb5b 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFAvg.txt
@@ -471,7 +471,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
index 7468c2f..2261e1b 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMax.txt
@@ -442,7 +442,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
index 57b7ea5..58d2d22 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxDecimal.txt
@@ -458,7 +458,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
index 749e97e..515692e 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxIntervalDayTime.txt
@@ -441,7 +441,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
index 9dfc147..c210e4c 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxString.txt
@@ -81,7 +81,7 @@ public class <ClassName> extends VectorAggregateExpression {
     @Override
     public int getVariableSize() {
       JavaDataModel model = JavaDataModel.get();
-      return model.lengthForByteArrayOfSize(bytes.length);
+      return (int) model.lengthForByteArrayOfSize(bytes.length);
     }
 
     @Override
@@ -388,7 +388,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
index 32ecb34..074aefd 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFMinMaxTimestamp.txt
@@ -443,7 +443,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
index bd0f14d..a89ae0a 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFSum.txt
@@ -433,7 +433,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object(),

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
index dc9d4b1..1e3516b 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVar.txt
@@ -513,7 +513,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +

http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
----------------------------------------------------------------------
diff --git a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
index 01062a9..b3ec7e9 100644
--- a/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
+++ b/ql/src/gen/vectorization/UDAFTemplates/VectorUDAFVarDecimal.txt
@@ -467,7 +467,7 @@ public class <ClassName> extends VectorAggregateExpression {
     }
 
     @Override
-    public int getAggregationBufferFixedSize() {
+    public long getAggregationBufferFixedSize() {
       JavaDataModel model = JavaDataModel.get();
       return JavaDataModel.alignUp(
         model.object() +
@@ -488,4 +488,4 @@ public class <ClassName> extends VectorAggregateExpression {
   public void setInputExpression(VectorExpression inputExpression) {
     this.inputExpression = inputExpression;
   }
-}
\ No newline at end of file
+}
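Every template in this run widens getAggregationBufferFixedSize() from int to long. A plausible motivation, inferred rather than stated in the diff: per-buffer fixed sizes get multiplied by large buffer counts when estimating group-by hash-table memory, and a 32-bit total silently overflows past 2 GB. A toy illustration of the failure mode:

public class OverflowDemo {
  public static void main(String[] args) {
    int bufferFixedSizeInt = 64;  // bytes per aggregation buffer (illustrative)
    int bufferCount = 50_000_000; // a plausible high-cardinality group-by

    // int arithmetic wraps: 64 * 50_000_000 = 3_200_000_000 > Integer.MAX_VALUE
    int wrong = bufferFixedSizeInt * bufferCount;
    long right = (long) bufferFixedSizeInt * bufferCount;

    System.out.println("int total:  " + wrong); // negative, wrapped value
    System.out.println("long total: " + right); // 3200000000
  }
}

The companion change in VectorUDAFMinMaxString.txt keeps getVariableSize() returning int, so the now-long JavaDataModel.lengthForByteArrayOfSize() result is explicitly narrowed there with a cast.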
http://git-wip-us.apache.org/repos/asf/hive/blob/187eb760/ql/src/java/org/apache/hadoop/hive/ql/Context.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Context.java b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
index 758c536..da1d3a5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Context.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Context.java
@@ -356,9 +356,7 @@ public class Context {
     if (mkdir) {
       try {
-        boolean inheritPerms = HiveConf.getBoolVar(conf,
-            HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS);
-        if (!FileUtils.mkdir(fs, dir, inheritPerms, conf)) {
+        if (!FileUtils.mkdir(fs, dir, conf)) {
           throw new IllegalStateException("Cannot create staging directory  '" + dir.toString() + "'");
         }
@@ -951,6 +949,13 @@ public class Context {
   public ExplainConfiguration getExplainConfig() {
     return explainConfig;
   }
+  private boolean isExplainPlan = false;
+  public boolean isExplainPlan() {
+    return isExplainPlan;
+  }
+  public void setExplainPlan(boolean t) {
+    this.isExplainPlan = t;
+  }
 
   public void setExplainConfig(ExplainConfiguration explainConfig) {
     this.explainConfig = explainConfig;