Mailing-List: contact commits-help@hive.apache.org; run by ezmlm
Reply-To: hive-dev@hive.apache.org
Subject: svn commit: r1051251 - in /hive/trunk: CHANGES.txt metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
Date: Mon, 20 Dec 2010 19:31:28 -0000
To: commits@hive.apache.org
From: namit@apache.org
Message-Id: <20101220193128.4AF872388994@eris.apache.org>

Author: namit
Date: Mon Dec 20 19:31:27 2010
New Revision: 1051251

URL: http://svn.apache.org/viewvc?rev=1051251&view=rev
Log:
HIVE-1854 Temporarily disable metastore tests for listPartitionsByFilter()
(Paul Yang via namit)

Modified:
    hive/trunk/CHANGES.txt
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java

Modified: hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hive/trunk/CHANGES.txt?rev=1051251&r1=1051250&r2=1051251&view=diff
==============================================================================
--- hive/trunk/CHANGES.txt (original)
+++ hive/trunk/CHANGES.txt Mon Dec 20 19:31:27 2010
@@ -619,6 +619,9 @@ Trunk - Unreleased
     HIVE-1845 Some attributes in eclipse template file are deprecated
     (Liyin Tang via namit)
 
+    HIVE-1854 Temporarily disable metastore tests for listPartitionsByFilter()
+    (Paul Yang via namit)
+
   TESTS
 
     HIVE-1464. improve test query performance
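The diff below empties the body of testPartitionFilter(), which exercises HiveMetaStoreClient.listPartitionsByFilter(). For reference while the test is disabled, here is a minimal sketch of the call shape, reusing the database, table, and filter strings from the removed test; the class and method names in the sketch are illustrative only, and the client is assumed to be a connected HiveMetaStoreClient such as the one the TestHiveMetaStore fixture provides.

import java.util.List;

import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.Partition;

// Illustrative sketch; not part of the commit below.
public class PartitionFilterSketch {
  // In the filter grammar "and" binds tighter than "or", which is what the
  // removed precedence checks assert (1 match with parentheses, 2 without).
  static void demo(HiveMetaStoreClient client) throws Exception {
    List<Partition> parts = client.listPartitionsByFilter(
        "filterdb",                                   // database created by the removed test
        "filtertbl",                                  // table partitioned by (p1, p2, p3)
        "p1=\"p12\" and (p2=\"p27\" Or p2=\"p21\")",  // removed test expected 1 match
        (short) -1);                                  // -1 = no cap on returned partitions
    System.out.println("with parentheses: " + parts.size() + " partition(s)");

    parts = client.listPartitionsByFilter(
        "filterdb", "filtertbl",
        "p1=\"p12\" and p2=\"p27\" Or p2=\"p21\"",    // removed test expected 2 matches
        (short) -1);
    System.out.println("without parentheses: " + parts.size() + " partition(s)");
  }
}

Passing a non-negative value for the final short argument caps the number of partitions returned; the removed test also covers that limit, and it checks that filters on non-string or non-partition keys and unparseable filter strings raise MetaException.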
Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=1051251&r1=1051250&r2=1051251&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Mon Dec 20 19:31:27 2010
@@ -990,165 +990,8 @@ public abstract class TestHiveMetaStore
    * @throws Exception
    */
   public void testPartitionFilter() throws Exception {
-    String dbName = "filterdb";
-    String tblName = "filtertbl";
-
-    List<String> vals = new ArrayList<String>(3);
-    vals.add("p11");
-    vals.add("p21");
-    vals.add("p31");
-    List<String> vals2 = new ArrayList<String>(3);
-    vals2.add("p11");
-    vals2.add("p22");
-    vals2.add("p31");
-    List<String> vals3 = new ArrayList<String>(3);
-    vals3.add("p12");
-    vals3.add("p21");
-    vals3.add("p31");
-    List<String> vals4 = new ArrayList<String>(3);
-    vals4.add("p12");
-    vals4.add("p23");
-    vals4.add("p31");
-    List<String> vals5 = new ArrayList<String>(3);
-    vals5.add("p13");
-    vals5.add("p24");
-    vals5.add("p31");
-    List<String> vals6 = new ArrayList<String>(3);
-    vals6.add("p13");
-    vals6.add("p25");
-    vals6.add("p31");
-
-    silentDropDatabase(dbName);
-
-    Database db = new Database();
-    db.setName(dbName);
-    client.createDatabase(db);
-
-    ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
-    cols.add(new FieldSchema("c1", Constants.STRING_TYPE_NAME, ""));
-    cols.add(new FieldSchema("c2", Constants.INT_TYPE_NAME, ""));
-
-    ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>(3);
-    partCols.add(new FieldSchema("p1", Constants.STRING_TYPE_NAME, ""));
-    partCols.add(new FieldSchema("p2", Constants.STRING_TYPE_NAME, ""));
-    partCols.add(new FieldSchema("p3", Constants.INT_TYPE_NAME, ""));
-
-    Table tbl = new Table();
-    tbl.setDbName(dbName);
-    tbl.setTableName(tblName);
-    StorageDescriptor sd = new StorageDescriptor();
-    tbl.setSd(sd);
-    sd.setCols(cols);
-    sd.setCompressed(false);
-    sd.setNumBuckets(1);
-    sd.setParameters(new HashMap<String, String>());
-    sd.setBucketCols(new ArrayList<String>());
-    sd.setSerdeInfo(new SerDeInfo());
-    sd.getSerdeInfo().setName(tbl.getTableName());
-    sd.getSerdeInfo().setParameters(new HashMap<String, String>());
-    sd.getSerdeInfo().getParameters()
-        .put(Constants.SERIALIZATION_FORMAT, "1");
-    sd.setSortCols(new ArrayList<Order>());
-
-    tbl.setPartitionKeys(partCols);
-    client.createTable(tbl);
-
-    tbl = client.getTable(dbName, tblName);
-
-    add_partition(client, tbl, vals, "part1");
-    add_partition(client, tbl, vals2, "part2");
-    add_partition(client, tbl, vals3, "part3");
-    add_partition(client, tbl, vals4, "part4");
-    add_partition(client, tbl, vals5, "part5");
-    add_partition(client, tbl, vals6, "part6");
-
-    checkFilter(client, dbName, tblName, "p1 = \"p11\"", 2);
-    checkFilter(client, dbName, tblName, "p1 = \"p12\"", 2);
-    checkFilter(client, dbName, tblName, "p2 = \"p21\"", 2);
-    checkFilter(client, dbName, tblName, "p2 = \"p23\"", 1);
-    checkFilter(client, dbName, tblName, "p1 = \"p11\" and p2=\"p22\"", 1);
-    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p2=\"p23\"", 3);
-    checkFilter(client, dbName, tblName, "p1 = \"p11\" or p1=\"p12\"", 4);
-
-    checkFilter(client, dbName, tblName,
-        "p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\")", 3);
-    checkFilter(client, dbName, tblName,
-        "p1 = \"p11\" or (p1=\"p12\" and p2=\"p21\") Or " +
-        "(p1=\"p13\" aNd p2=\"p24\")", 4);
-    //test for and or precedence
-    checkFilter(client, dbName, tblName,
-        "p1=\"p12\" and (p2=\"p27\" Or p2=\"p21\")", 1);
-    checkFilter(client, dbName, tblName,
-        "p1=\"p12\" and p2=\"p27\" Or p2=\"p21\"", 2);
-
-    checkFilter(client, dbName, tblName, "p1 > \"p12\"", 2);
-    checkFilter(client, dbName, tblName, "p1 >= \"p12\"", 4);
-    checkFilter(client, dbName, tblName, "p1 < \"p12\"", 2);
-    checkFilter(client, dbName, tblName, "p1 <= \"p12\"", 4);
-    checkFilter(client, dbName, tblName, "p1 <> \"p12\"", 4);
-    checkFilter(client, dbName, tblName, "p1 like \"p1.*\"", 6);
-    checkFilter(client, dbName, tblName, "p2 like \"p.*3\"", 1);
-
-    //Test for setting the maximum partition count
-    List<Partition> partitions = client.listPartitionsByFilter(dbName,
-        tblName, "p1 >= \"p12\"", (short) 2);
-    assertEquals("User specified row limit for partitions",
-        2, partitions.size());
-
-    //Negative tests
-    Exception me = null;
-    try {
-      client.listPartitionsByFilter(dbName,
-          tblName, "p3 >= \"p12\"", (short) -1);
-    } catch(MetaException e) {
-      me = e;
-    }
-    assertNotNull(me);
-    assertTrue("Filter on int partition key", me.getMessage().contains(
-        "Filtering is supported only on partition keys of type string"));
-
-    me = null;
-    try {
-      client.listPartitionsByFilter(dbName,
-          tblName, "c1 >= \"p12\"", (short) -1);
-    } catch(MetaException e) {
-      me = e;
-    }
-    assertNotNull(me);
-    assertTrue("Filter on invalid key", me.getMessage().contains(
-        " is not a partitioning key for the table"));
-
-    me = null;
-    try {
-      client.listPartitionsByFilter(dbName,
-          tblName, "c1 >= ", (short) -1);
-    } catch(MetaException e) {
-      me = e;
-    }
-    assertNotNull(me);
-    assertTrue("Invalid filter string", me.getMessage().contains(
-        "Error parsing partition filter"));
-
-    me = null;
-    try {
-      client.listPartitionsByFilter("invDBName",
-          "invTableName", "p1 = \"p11\"", (short) -1);
-    } catch(NoSuchObjectException e) {
-      me = e;
-    }
-    assertNotNull(me);
-    assertTrue("NoSuchObject exception", me.getMessage().contains(
-        "database/table does not exist"));
-  }
-
-  private void checkFilter(HiveMetaStoreClient client, String dbName,
-      String tblName, String filter, int expectedCount)
-      throws MetaException, NoSuchObjectException, TException {
-    List<Partition> partitions = client.listPartitionsByFilter(dbName,
-        tblName, filter, (short) -1);
-
-    assertEquals("Partition count expected for filter " + filter,
-        expectedCount, partitions.size());
+    // Tests listMPartitionsByFilter() introduced by HIVE-1609. Temporarily
+    // disabled until issues identified by HIVE-1853 are resolved.
   }
 
   private void add_partition(HiveMetaStoreClient client, Table table,