incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tra...@apache.org
Subject svn commit: r1383152 [16/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/NoExitSecurityManager.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/NoExitSecurityManager.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/NoExitSecurityManager.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/NoExitSecurityManager.java Mon Sep 10 23:28:55 2012
@@ -22,20 +22,20 @@ import java.security.Permission;
 
 public class NoExitSecurityManager extends SecurityManager {
 
-  @Override
-  public void checkPermission(Permission perm) {
-    // allow anything.
-  }
+    @Override
+    public void checkPermission(Permission perm) {
+        // allow anything.
+    }
 
-  @Override
-  public void checkPermission(Permission perm, Object context) {
-    // allow anything.
-  }
+    @Override
+    public void checkPermission(Permission perm, Object context) {
+        // allow anything.
+    }
 
-  @Override
-  public void checkExit(int status) {
+    @Override
+    public void checkExit(int status) {
 
-    super.checkExit(status);
-    throw new ExitException(status);
-  }
+        super.checkExit(status);
+        throw new ExitException(status);
+    }
 }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/DummyStorageHandler.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/DummyStorageHandler.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/DummyStorageHandler.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/DummyStorageHandler.java Mon Sep 10 23:28:55 2012
@@ -91,7 +91,7 @@ class DummyStorageHandler extends HCatSt
 
     @Override
     public HiveAuthorizationProvider getAuthorizationProvider()
-            throws HiveException {
+        throws HiveException {
         return new DummyAuthProvider();
     }
 
@@ -140,8 +140,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void authorize(Privilege[] readRequiredPriv,
-                Privilege[] writeRequiredPriv) throws HiveException,
-                AuthorizationException {
+                              Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
         }
 
         /* @param db
@@ -153,8 +153,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void authorize(Database db, Privilege[] readRequiredPriv,
-                Privilege[] writeRequiredPriv) throws HiveException,
-                AuthorizationException {
+                              Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
         }
 
         /* @param table
@@ -166,8 +166,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void authorize(org.apache.hadoop.hive.ql.metadata.Table table, Privilege[] readRequiredPriv,
-                Privilege[] writeRequiredPriv) throws HiveException,
-                AuthorizationException {
+                              Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
         }
 
         /* @param part
@@ -179,8 +179,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void authorize(Partition part, Privilege[] readRequiredPriv,
-                Privilege[] writeRequiredPriv) throws HiveException,
-                AuthorizationException {
+                              Privilege[] writeRequiredPriv) throws HiveException,
+            AuthorizationException {
         }
 
         /* @param table
@@ -194,8 +194,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void authorize(org.apache.hadoop.hive.ql.metadata.Table table, Partition part, List<String> columns,
-                Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
-                throws HiveException, AuthorizationException {
+                              Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
+            throws HiveException, AuthorizationException {
         }
 
     }
@@ -205,7 +205,7 @@ class DummyStorageHandler extends HCatSt
      * mapred.InputFormat required by HiveStorageHandler.
      */
     class DummyInputFormat implements
-            InputFormat<WritableComparable, HCatRecord> {
+        InputFormat<WritableComparable, HCatRecord> {
 
         /*
          * @see
@@ -215,8 +215,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public RecordReader<WritableComparable, HCatRecord> getRecordReader(
-                InputSplit split, JobConf jobconf, Reporter reporter)
-                throws IOException {
+            InputSplit split, JobConf jobconf, Reporter reporter)
+            throws IOException {
             throw new IOException("This operation is not supported.");
         }
 
@@ -227,7 +227,7 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public InputSplit[] getSplits(JobConf jobconf, int number)
-                throws IOException {
+            throws IOException {
             throw new IOException("This operation is not supported.");
         }
     }
@@ -237,8 +237,8 @@ class DummyStorageHandler extends HCatSt
      * mapred.OutputFormat and HiveOutputFormat required by HiveStorageHandler.
      */
     class DummyOutputFormat implements
-            OutputFormat<WritableComparable<?>, HCatRecord>,
-            HiveOutputFormat<WritableComparable<?>, HCatRecord> {
+        OutputFormat<WritableComparable<?>, HCatRecord>,
+        HiveOutputFormat<WritableComparable<?>, HCatRecord> {
 
         /*
          * @see
@@ -247,7 +247,7 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public void checkOutputSpecs(FileSystem fs, JobConf jobconf)
-                throws IOException {
+            throws IOException {
             throw new IOException("This operation is not supported.");
 
         }
@@ -260,8 +260,8 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public RecordWriter<WritableComparable<?>, HCatRecord> getRecordWriter(
-                FileSystem fs, JobConf jobconf, String str,
-                Progressable progress) throws IOException {
+            FileSystem fs, JobConf jobconf, String str,
+            Progressable progress) throws IOException {
             throw new IOException("This operation is not supported.");
         }
 
@@ -274,10 +274,10 @@ class DummyStorageHandler extends HCatSt
          */
         @Override
         public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(
-                JobConf jc, Path finalOutPath,
-                Class<? extends Writable> valueClass, boolean isCompressed,
-                Properties tableProperties, Progressable progress)
-                throws IOException {
+            JobConf jc, Path finalOutPath,
+            Class<? extends Writable> valueClass, boolean isCompressed,
+            Properties tableProperties, Progressable progress)
+            throws IOException {
             throw new IOException("This operation is not supported.");
         }
 

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestPermsGrp.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestPermsGrp.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestPermsGrp.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestPermsGrp.java Mon Sep 10 23:28:55 2012
@@ -53,179 +53,177 @@ import org.slf4j.LoggerFactory;
 
 public class TestPermsGrp extends TestCase {
 
-  private boolean isServerRunning = false;
-  private static final int msPort = 20101;
-  private HiveConf hcatConf;
-  private Warehouse clientWH;
-  private HiveMetaStoreClient msc;
-  private static final Logger LOG = LoggerFactory.getLogger(TestPermsGrp.class);
-
-  @Override
-  protected void tearDown() throws Exception {
-    System.setSecurityManager(securityManager);
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-
-    if(isServerRunning) {
-      return;
-    }
-
-    MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
-
-    isServerRunning = true;
-
-    securityManager = System.getSecurityManager();
-    System.setSecurityManager(new NoExitSecurityManager());
-
-    hcatConf = new HiveConf(this.getClass());
-    hcatConf.set("hive.metastore.local", "false");
-    hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
-    hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
-
-    hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
-    hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-    hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-    clientWH = new Warehouse(hcatConf);
-    msc = new HiveMetaStoreClient(hcatConf,null);
-    System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
-    System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
-  }
-
-
-  public void testCustomPerms() throws Exception {
-
-    String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
-    String tblName = "simptbl";
-    String typeName = "Person";
-
-    try {
-
-      // Lets first test for default permissions, this is the case when user specified nothing.
-      Table tbl = getTable(dbName,tblName,typeName);
-      msc.createTable(tbl);
-      Database db = Hive.get(hcatConf).getDatabase(dbName);
-      Path dfsPath = clientWH.getTablePath(db, tblName);
-      cleanupTbl(dbName, tblName, typeName);
-
-      // Next user did specify perms.
-      try{
-        HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rwx-wx---"});
-      }
-      catch(Exception e){
-        assertTrue(e instanceof ExitException);
-        assertEquals(((ExitException)e).getStatus(), 0);
-      }
-      dfsPath = clientWH.getTablePath(db, tblName);
-      assertTrue(dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission().equals(FsPermission.valueOf("drwx-wx---")));
-
-      cleanupTbl(dbName, tblName, typeName);
-
-      // User specified perms in invalid format.
-      hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
-      // make sure create table fails.
-      try{
-        HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rwx"});
-        assert false;
-      }catch(Exception me){
-        assertTrue(me instanceof ExitException);
-      }
-      // No physical dir gets created.
-      dfsPath = clientWH.getTablePath(db,tblName);
-      try{
-        dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
-        assert false;
-      } catch(Exception fnfe){
-        assertTrue(fnfe instanceof FileNotFoundException);
-      }
-
-      // And no metadata gets created.
-      try{
-        msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
-        assert false;
-      }catch (Exception e){
-        assertTrue(e instanceof NoSuchObjectException);
-        assertEquals("default.simptbl table not found", e.getMessage());
-      }
-
-      // test for invalid group name
-      hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
-      hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
-
-      try{
-        // create table must fail.
-        HCatCli.main(new String[]{"-e","create table simptbl (name string) stored as RCFILE", "-p","rw-rw-rw-","-g","THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
-        assert false;
-      }catch (Exception me){
-        assertTrue(me instanceof SecurityException);
-      }
-
-      try{
-        // no metadata should get created.
-        msc.getTable(dbName, tblName);
-        assert false;
-      }catch (Exception e){
-        assertTrue(e instanceof NoSuchObjectException);
-        assertEquals("default.simptbl table not found", e.getMessage());
-      }
-      try{
-        // neither dir should get created.
-        dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
-        assert false;
-      } catch(Exception e){
-        assertTrue(e instanceof FileNotFoundException);
-      }
-
-    } catch (Exception e) {
-        LOG.error("testCustomPerms failed.", e);
-      throw e;
-    }
-  }
-
-  private void silentDropDatabase(String dbName) throws MetaException, TException {
-    try {
-      for (String tableName : msc.getTables(dbName, "*")) {
-        msc.dropTable(dbName, tableName);
-      }
-
-    } catch (NoSuchObjectException e) {
-    }
-  }
-
-  private void cleanupTbl(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, InvalidOperationException{
-
-    msc.dropTable(dbName, tblName);
-    msc.dropType(typeName);
-  }
-
-  private Table getTable(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, AlreadyExistsException, InvalidObjectException{
-
-    msc.dropTable(dbName, tblName);
-    silentDropDatabase(dbName);
-
-
-    msc.dropType(typeName);
-    Type typ1 = new Type();
-    typ1.setName(typeName);
-    typ1.setFields(new ArrayList<FieldSchema>(1));
-    typ1.getFields().add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
-    msc.createType(typ1);
-
-    Table tbl = new Table();
-    tbl.setDbName(dbName);
-    tbl.setTableName(tblName);
-    StorageDescriptor sd = new StorageDescriptor();
-    tbl.setSd(sd);
-    sd.setCols(typ1.getFields());
-
-    sd.setSerdeInfo(new SerDeInfo());
-    return tbl;
-  }
+    private boolean isServerRunning = false;
+    private static final int msPort = 20101;
+    private HiveConf hcatConf;
+    private Warehouse clientWH;
+    private HiveMetaStoreClient msc;
+    private static final Logger LOG = LoggerFactory.getLogger(TestPermsGrp.class);
+
+    @Override
+    protected void tearDown() throws Exception {
+        System.setSecurityManager(securityManager);
+    }
+
+    @Override
+    protected void setUp() throws Exception {
+
+        if (isServerRunning) {
+            return;
+        }
+
+        MetaStoreUtils.startMetaStore(msPort, ShimLoader.getHadoopThriftAuthBridge());
+
+        isServerRunning = true;
+
+        securityManager = System.getSecurityManager();
+        System.setSecurityManager(new NoExitSecurityManager());
+
+        hcatConf = new HiveConf(this.getClass());
+        hcatConf.set("hive.metastore.local", "false");
+        hcatConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://127.0.0.1:" + msPort);
+        hcatConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
+
+        hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+        clientWH = new Warehouse(hcatConf);
+        msc = new HiveMetaStoreClient(hcatConf, null);
+        System.setProperty(HiveConf.ConfVars.PREEXECHOOKS.varname, " ");
+        System.setProperty(HiveConf.ConfVars.POSTEXECHOOKS.varname, " ");
+    }
+
 
+    public void testCustomPerms() throws Exception {
+
+        String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+        String tblName = "simptbl";
+        String typeName = "Person";
+
+        try {
+
+            // Lets first test for default permissions, this is the case when user specified nothing.
+            Table tbl = getTable(dbName, tblName, typeName);
+            msc.createTable(tbl);
+            Database db = Hive.get(hcatConf).getDatabase(dbName);
+            Path dfsPath = clientWH.getTablePath(db, tblName);
+            cleanupTbl(dbName, tblName, typeName);
+
+            // Next user did specify perms.
+            try {
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx-wx---"});
+            } catch (Exception e) {
+                assertTrue(e instanceof ExitException);
+                assertEquals(((ExitException) e).getStatus(), 0);
+            }
+            dfsPath = clientWH.getTablePath(db, tblName);
+            assertTrue(dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath).getPermission().equals(FsPermission.valueOf("drwx-wx---")));
+
+            cleanupTbl(dbName, tblName, typeName);
+
+            // User specified perms in invalid format.
+            hcatConf.set(HCatConstants.HCAT_PERMS, "rwx");
+            // make sure create table fails.
+            try {
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rwx"});
+                assert false;
+            } catch (Exception me) {
+                assertTrue(me instanceof ExitException);
+            }
+            // No physical dir gets created.
+            dfsPath = clientWH.getTablePath(db, tblName);
+            try {
+                dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
+                assert false;
+            } catch (Exception fnfe) {
+                assertTrue(fnfe instanceof FileNotFoundException);
+            }
+
+            // And no metadata gets created.
+            try {
+                msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+                assert false;
+            } catch (Exception e) {
+                assertTrue(e instanceof NoSuchObjectException);
+                assertEquals("default.simptbl table not found", e.getMessage());
+            }
+
+            // test for invalid group name
+            hcatConf.set(HCatConstants.HCAT_PERMS, "drw-rw-rw-");
+            hcatConf.set(HCatConstants.HCAT_GROUP, "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER");
+
+            try {
+                // create table must fail.
+                HCatCli.main(new String[]{"-e", "create table simptbl (name string) stored as RCFILE", "-p", "rw-rw-rw-", "-g", "THIS_CANNOT_BE_A_VALID_GRP_NAME_EVER"});
+                assert false;
+            } catch (Exception me) {
+                assertTrue(me instanceof SecurityException);
+            }
+
+            try {
+                // no metadata should get created.
+                msc.getTable(dbName, tblName);
+                assert false;
+            } catch (Exception e) {
+                assertTrue(e instanceof NoSuchObjectException);
+                assertEquals("default.simptbl table not found", e.getMessage());
+            }
+            try {
+                // neither dir should get created.
+                dfsPath.getFileSystem(hcatConf).getFileStatus(dfsPath);
+                assert false;
+            } catch (Exception e) {
+                assertTrue(e instanceof FileNotFoundException);
+            }
+
+        } catch (Exception e) {
+            LOG.error("testCustomPerms failed.", e);
+            throw e;
+        }
+    }
+
+    private void silentDropDatabase(String dbName) throws MetaException, TException {
+        try {
+            for (String tableName : msc.getTables(dbName, "*")) {
+                msc.dropTable(dbName, tableName);
+            }
+
+        } catch (NoSuchObjectException e) {
+        }
+    }
+
+    private void cleanupTbl(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, InvalidOperationException {
+
+        msc.dropTable(dbName, tblName);
+        msc.dropType(typeName);
+    }
+
+    private Table getTable(String dbName, String tblName, String typeName) throws NoSuchObjectException, MetaException, TException, AlreadyExistsException, InvalidObjectException {
+
+        msc.dropTable(dbName, tblName);
+        silentDropDatabase(dbName);
+
+
+        msc.dropType(typeName);
+        Type typ1 = new Type();
+        typ1.setName(typeName);
+        typ1.setFields(new ArrayList<FieldSchema>(1));
+        typ1.getFields().add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+        msc.createType(typ1);
+
+        Table tbl = new Table();
+        tbl.setDbName(dbName);
+        tbl.setTableName(tblName);
+        StorageDescriptor sd = new StorageDescriptor();
+        tbl.setSd(sd);
+        sd.setCols(typ1.getFields());
+
+        sd.setSerdeInfo(new SerDeInfo());
+        return tbl;
+    }
 
 
-  private SecurityManager securityManager;
+    private SecurityManager securityManager;
 
 }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java Mon Sep 10 23:28:55 2012
@@ -52,340 +52,339 @@ import org.slf4j.LoggerFactory;
 
 public class TestSemanticAnalysis extends HCatBaseTest {
 
-  private static final Logger LOG = LoggerFactory.getLogger(TestSemanticAnalysis.class);
-  private static final String TBL_NAME = "junit_sem_analysis";
+    private static final Logger LOG = LoggerFactory.getLogger(TestSemanticAnalysis.class);
+    private static final String TBL_NAME = "junit_sem_analysis";
 
-  private Driver hcatDriver = null;
-  private String query;
+    private Driver hcatDriver = null;
+    private String query;
 
-  @Before
-  public void setUpHCatDriver() throws IOException {
-    if (hcatDriver == null) {
-      HiveConf hcatConf = new HiveConf(hiveConf);
-      hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
-          HCatSemanticAnalyzer.class.getName());
-      hcatDriver = new Driver(hcatConf);
-      SessionState.start(new CliSessionState(hcatConf));
-    }
-  }
-
-  @Test
-  public void testDescDB() throws CommandNeedRetryException, IOException {
-    hcatDriver.run("drop database mydb cascade");
-    assertEquals(0, hcatDriver.run("create database mydb").getResponseCode());
-    CommandProcessorResponse resp = hcatDriver.run("describe database mydb");
-    assertEquals(0, resp.getResponseCode());
-    ArrayList<String> result = new ArrayList<String>();
-    hcatDriver.getResults(result);
-    assertTrue(result.get(0).contains("mydb.db"));
-    hcatDriver.run("drop database mydb cascade");
-  }
-
-  @Test
-  public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException{
-    driver.run("drop table junit_sem_analysis");
-    CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
-    assertEquals(resp.getResponseCode(), 0);
-    assertEquals(null, resp.getErrorMessage());
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    assertEquals("Partition key name case problem", "b" , tbl.getPartitionKeys().get(0).getName());
-    driver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
-
-    driver.run("drop table junit_sem_analysis");
-    driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
-    driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
-    hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
-
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
-    assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-
-    List<String> partVals = new ArrayList<String>(1);
-    partVals.add("2010-10-10");
-    Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
-
-    assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat());
-
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testUsNonExistentDB() throws CommandNeedRetryException {
-      CommandProcessorResponse resp = hcatDriver.run("use no_such_db");
-      assertEquals(1, resp.getResponseCode());
-  }
-
-  @Test
-  public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
-
-    List<String> dbs = client.getAllDatabases();
-    String testDb1 = "testdatabaseoperatons1";
-    String testDb2 = "testdatabaseoperatons2";
-
-    if (dbs.contains(testDb1.toLowerCase())){
-      assertEquals(0,hcatDriver.run("drop database "+testDb1).getResponseCode());
-    }
-
-    if (dbs.contains(testDb2.toLowerCase())){
-      assertEquals(0,hcatDriver.run("drop database "+testDb2).getResponseCode());
-    }
-
-    assertEquals(0,hcatDriver.run("create database "+testDb1).getResponseCode());
-    assertTrue(client.getAllDatabases().contains(testDb1));
-    assertEquals(0, hcatDriver.run("create database if not exists " + testDb1).getResponseCode());
-    assertTrue(client.getAllDatabases().contains(testDb1));
-    assertEquals(0,hcatDriver.run("create database if not exists "+testDb2).getResponseCode());
-    assertTrue(client.getAllDatabases().contains(testDb2));
-
-    assertEquals(0,hcatDriver.run("drop database "+testDb1).getResponseCode());
-    assertEquals(0,hcatDriver.run("drop database "+testDb2).getResponseCode());
-    assertFalse(client.getAllDatabases().contains(testDb1));
-    assertFalse(client.getAllDatabases().contains(testDb2));
-  }
-
-  @Test
-  public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table "+ TBL_NAME);
-    hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    List<FieldSchema> cols = tbl.getSd().getCols();
-    assertEquals(1, cols.size());
-    assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
-    assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
-
-    CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
-    assertEquals(0, resp.getResponseCode());
-    assertNull(resp.getErrorMessage());
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    cols = tbl.getSd().getCols();
-    assertEquals(1, cols.size());
-    assertTrue(cols.get(0).equals(new FieldSchema("a", "int",null)));
-    assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAlterTblTouch() throws CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch");
-    assertEquals(0, response.getResponseCode());
-
-    hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
-    assertEquals(0, response.getResponseCode());
-
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testChangeColumns() throws CommandNeedRetryException{
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
-    assertEquals(0, response.getResponseCode());
-
-    response = hcatDriver.run("alter table junit_sem_analysis change a1 a string");
-    assertEquals(0, response.getResponseCode());
-
-    response = hcatDriver.run("alter table junit_sem_analysis change a a int after c");
-    assertEquals(0, response.getResponseCode());
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
-    assertEquals(0, response.getResponseCode());
-
-    response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
-    assertEquals(0, response.getResponseCode());
-    assertNull(response.getErrorMessage());
-
-    response = hcatDriver.run("describe extended junit_sem_analysis");
-    assertEquals(0, response.getResponseCode());
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    List<FieldSchema> cols = tbl.getSd().getCols();
-    assertEquals(2, cols.size());
-    assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
-    assertTrue(cols.get(1).equals(new FieldSchema("d", "tinyint", null)));
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAlterTblClusteredBy() throws CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
-    assertEquals(0, response.getResponseCode());
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-
-    hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
-        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
-    hcatDriver.run("desc extended junit_sem_analysis");
-
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAddPartFail() throws CommandNeedRetryException{
-
-    driver.run("drop table junit_sem_analysis");
-    driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'");
-    assertEquals(0, response.getResponseCode());
-    driver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAddPartPass() throws IOException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '" + TEST_DATA_DIR + "'");
-    assertEquals(0, response.getResponseCode());
-    assertNull(response.getErrorMessage());
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testCTAS() throws CommandNeedRetryException{
-    hcatDriver.run("drop table junit_sem_analysis");
-    query = "create table junit_sem_analysis (a int) as select * from tbl2";
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(40000, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("FAILED: SemanticException Operation not supported. Create table as Select is not a valid operation."));
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testStoredAs() throws CommandNeedRetryException{
-    hcatDriver.run("drop table junit_sem_analysis");
-    query = "create table junit_sem_analysis (a int)";
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(0, response.getResponseCode());
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
-        "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
-        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
-    assertEquals(0,hcatDriver.run(query).getResponseCode());
-
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
-    assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
-    assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-
-    hcatDriver.run("drop table junit_sem_analysis");
-  }
-
-  @Test
-  public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b int)  stored as RCFILE";
-
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(40000,response.getResponseCode());
-    assertEquals("FAILED: SemanticException Operation not supported. HCatalog only supports partition columns of type string. For column: b Found type: int",
-        response.getErrorMessage());
-
-  }
-
-  @Test
-  public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
-
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(0,response.getResponseCode());
-
-  }
-
-  @Test
-  public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
-
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(0,response.getResponseCode());
-
-  }
-
-  @Test
-  public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException{
-
-    hcatDriver.run("drop table junit_sem_analysis");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
-
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(0,response.getResponseCode());
-  }
-
-  @Test
-  public void testCTLFail() throws IOException, CommandNeedRetryException{
-
-    driver.run("drop table junit_sem_analysis");
-    driver.run("drop table like_table");
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
-
-    driver.run(query);
-    query = "create table like_table like junit_sem_analysis";
-    CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(0,response.getResponseCode());
-  }
-
-  @Test
-  public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
-
-    try{
-      hcatDriver.run("drop table junit_sem_analysis");
-    }
-    catch( Exception e){
-      LOG.error("Error in drop table.",e);
-    }
-    query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
-
-    hcatDriver.run(query);
-    String likeTbl = "like_table";
-    hcatDriver.run("drop table "+likeTbl);
-    query = "create table like_table like junit_sem_analysis";
-    CommandProcessorResponse resp = hcatDriver.run(query);
-    assertEquals(0, resp.getResponseCode());
+    @Before
+    public void setUpHCatDriver() throws IOException {
+        if (hcatDriver == null) {
+            HiveConf hcatConf = new HiveConf(hiveConf);
+            hcatConf.set(HiveConf.ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
+                    HCatSemanticAnalyzer.class.getName());
+            hcatDriver = new Driver(hcatConf);
+            SessionState.start(new CliSessionState(hcatConf));
+        }
+    }
+
+    @Test
+    public void testDescDB() throws CommandNeedRetryException, IOException {
+        hcatDriver.run("drop database mydb cascade");
+        assertEquals(0, hcatDriver.run("create database mydb").getResponseCode());
+        CommandProcessorResponse resp = hcatDriver.run("describe database mydb");
+        assertEquals(0, resp.getResponseCode());
+        ArrayList<String> result = new ArrayList<String>();
+        hcatDriver.getResults(result);
+        assertTrue(result.get(0).contains("mydb.db"));
+        hcatDriver.run("drop database mydb cascade");
+    }
+
+    @Test
+    public void testCreateTblWithLowerCasePartNames() throws CommandNeedRetryException, MetaException, TException, NoSuchObjectException {
+        driver.run("drop table junit_sem_analysis");
+        CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
+        assertEquals(resp.getResponseCode(), 0);
+        assertEquals(null, resp.getErrorMessage());
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals("Partition key name case problem", "b", tbl.getPartitionKeys().get(0).getName());
+        driver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblFFpart() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as TEXTFILE");
+        driver.run("alter table junit_sem_analysis add partition (b='2010-10-10')");
+        hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') set fileformat RCFILE");
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        List<String> partVals = new ArrayList<String>(1);
+        partVals.add("2010-10-10");
+        Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
+
+        assertEquals(RCFileInputFormat.class.getName(), part.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), part.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testUseNonExistentDB() throws CommandNeedRetryException {
+        CommandProcessorResponse resp = hcatDriver.run("use no_such_db");
+        assertEquals(1, resp.getResponseCode());
+    }
+
+    @Test
+    public void testDatabaseOperations() throws MetaException, CommandNeedRetryException {
+
+        List<String> dbs = client.getAllDatabases();
+        String testDb1 = "testdatabaseoperations1";
+        String testDb2 = "testdatabaseoperations2";
+
+        if (dbs.contains(testDb1.toLowerCase())) {
+            assertEquals(0, hcatDriver.run("drop database " + testDb1).getResponseCode());
+        }
+
+        if (dbs.contains(testDb2.toLowerCase())) {
+            assertEquals(0, hcatDriver.run("drop database " + testDb2).getResponseCode());
+        }
+
+        assertEquals(0, hcatDriver.run("create database " + testDb1).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb1));
+        assertEquals(0, hcatDriver.run("create database if not exists " + testDb1).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb1));
+        assertEquals(0, hcatDriver.run("create database if not exists " + testDb2).getResponseCode());
+        assertTrue(client.getAllDatabases().contains(testDb2));
+
+        assertEquals(0, hcatDriver.run("drop database " + testDb1).getResponseCode());
+        assertEquals(0, hcatDriver.run("drop database " + testDb2).getResponseCode());
+        assertFalse(client.getAllDatabases().contains(testDb1));
+        assertFalse(client.getAllDatabases().contains(testDb2));
+    }
+
+    @Test
+    public void testCreateTableIfNotExists() throws MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table " + TBL_NAME);
+        hcatDriver.run("create table junit_sem_analysis (a int) stored as RCFILE");
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        List<FieldSchema> cols = tbl.getSd().getCols();
+        assertEquals(1, cols.size());
+        assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
+        assertEquals(0, resp.getResponseCode());
+        assertNull(resp.getErrorMessage());
+        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        cols = tbl.getSd().getCols();
+        assertEquals(1, cols.size());
+        assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblTouch() throws CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch");
+        assertEquals(0, response.getResponseCode());
+
+        hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
+        assertEquals(0, response.getResponseCode());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testChangeColumns() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis change a1 a string");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis change a a int after c");
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddReplaceCols() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
+        assertEquals(0, response.getResponseCode());
+
+        response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+
+        response = hcatDriver.run("describe extended junit_sem_analysis");
+        assertEquals(0, response.getResponseCode());
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        List<FieldSchema> cols = tbl.getSd().getCols();
+        assertEquals(2, cols.size());
+        assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
+        assertTrue(cols.get(1).equals(new FieldSchema("d", "tinyint", null)));
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTblClusteredBy() throws CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAlterTableSetFF() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
+        hcatDriver.run("desc extended junit_sem_analysis");
+
+        tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddPartFail() throws CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'");
+        assertEquals(0, response.getResponseCode());
+        driver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddPartPass() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
+        CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '" + TEST_DATA_DIR + "'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testCTAS() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) as select * from tbl2";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(40000, response.getResponseCode());
+        assertTrue(response.getErrorMessage().contains("FAILED: SemanticException Operation not supported. Create table as Select is not a valid operation."));
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testStoredAs() throws CommandNeedRetryException {
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int)";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testAddDriverInfo() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as " +
+                "INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver' ";
+        assertEquals(0, hcatDriver.run(query).getResponseCode());
+
+        Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+        assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
+        assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
+
+        hcatDriver.run("drop table junit_sem_analysis");
+    }
+
+    @Test
+    public void testInvalidateNonStringPartition() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b int)  stored as RCFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(40000, response.getResponseCode());
+        assertEquals("FAILED: SemanticException Operation not supported. HCatalog only supports partition columns of type string. For column: b Found type: int",
+                response.getErrorMessage());
+
+    }
+
+    @Test
+    public void testInvalidateSeqFileStoredAs() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+
+    }
+
+    @Test
+    public void testInvalidateTextFileStoredAs() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string)  stored as TEXTFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+
+    }
+
+    @Test
+    public void testInvalidateClusteredBy() throws IOException, CommandNeedRetryException {
+
+        hcatDriver.run("drop table junit_sem_analysis");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
+
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+    }
+
+    @Test
+    public void testCTLFail() throws IOException, CommandNeedRetryException {
+
+        driver.run("drop table junit_sem_analysis");
+        driver.run("drop table like_table");
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
+
+        driver.run(query);
+        query = "create table like_table like junit_sem_analysis";
+        CommandProcessorResponse response = hcatDriver.run(query);
+        assertEquals(0, response.getResponseCode());
+    }
+
+    @Test
+    public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException {
+
+        try {
+            hcatDriver.run("drop table junit_sem_analysis");
+        } catch (Exception e) {
+            LOG.error("Error in drop table.", e);
+        }
+        query = "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
+
+        hcatDriver.run(query);
+        String likeTbl = "like_table";
+        hcatDriver.run("drop table " + likeTbl);
+        query = "create table like_table like junit_sem_analysis";
+        CommandProcessorResponse resp = hcatDriver.run(query);
+        assertEquals(0, resp.getResponseCode());
 //    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTbl);
 //    assertEquals(likeTbl,tbl.getTableName());
 //    List<FieldSchema> cols = tbl.getSd().getCols();
@@ -399,7 +398,7 @@ public class TestSemanticAnalysis extend
 //
 //    hcatDriver.run("drop table junit_sem_analysis");
 //    hcatDriver.run("drop table "+likeTbl);
-  }
+    }
 
 // This test case currently fails, since add partitions don't inherit anything from tables.
 

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestUseDatabase.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestUseDatabase.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestUseDatabase.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestUseDatabase.java Mon Sep 10 23:28:55 2012
@@ -31,46 +31,46 @@ import org.apache.hadoop.hive.ql.session
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
 
 /* Unit test for GitHub Howl issue #3 */
-public class TestUseDatabase extends TestCase{
+public class TestUseDatabase extends TestCase {
 
-  private Driver hcatDriver;
+    private Driver hcatDriver;
 
-  @Override
-  protected void setUp() throws Exception {
+    @Override
+    protected void setUp() throws Exception {
 
-    HiveConf hcatConf = new HiveConf(this.getClass());
-    hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
-    hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
-    hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+        HiveConf hcatConf = new HiveConf(this.getClass());
+        hcatConf.set(ConfVars.PREEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.POSTEXECHOOKS.varname, "");
+        hcatConf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 
-    hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
-    hcatDriver = new Driver(hcatConf);
-    SessionState.start(new CliSessionState(hcatConf));
-  }
+        hcatConf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        hcatDriver = new Driver(hcatConf);
+        SessionState.start(new CliSessionState(hcatConf));
+    }
 
-  String query;
-  private final String dbName = "testUseDatabase_db";
-  private final String tblName = "testUseDatabase_tbl";
+    String query;
+    private final String dbName = "testUseDatabase_db";
+    private final String tblName = "testUseDatabase_tbl";
 
-  public void testAlterTablePass() throws IOException, CommandNeedRetryException{
+    public void testAlterTablePass() throws IOException, CommandNeedRetryException {
 
-    hcatDriver.run("create database " + dbName);
-    hcatDriver.run("use " + dbName);
-    hcatDriver.run("create table " + tblName + " (a int) partitioned by (b string) stored as RCFILE");
+        hcatDriver.run("create database " + dbName);
+        hcatDriver.run("use " + dbName);
+        hcatDriver.run("create table " + tblName + " (a int) partitioned by (b string) stored as RCFILE");
 
-    CommandProcessorResponse response;
+        CommandProcessorResponse response;
 
-    response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '/tmp'");
-    assertEquals(0, response.getResponseCode());
-    assertNull(response.getErrorMessage());
+        response = hcatDriver.run("alter table " + tblName + " add partition (b='2') location '/tmp'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
 
-    response = hcatDriver.run("alter table " + tblName + " set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
-        "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
-    assertEquals(0, response.getResponseCode());
-    assertNull(response.getErrorMessage());
+        response = hcatDriver.run("alter table " + tblName + " set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
+                "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
+        assertEquals(0, response.getResponseCode());
+        assertNull(response.getErrorMessage());
 
-    hcatDriver.run("drop table " + tblName);
-    hcatDriver.run("drop database " + dbName);
-  }
+        hcatDriver.run("drop table " + tblName);
+        hcatDriver.run("drop database " + dbName);
+    }
 
 }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHCatUtil.java Mon Sep 10 23:28:55 2012
@@ -41,142 +41,142 @@ import org.junit.Test;
 
 public class TestHCatUtil {
 
-  @Test
-  public void testFsPermissionOperation(){
+    @Test
+    public void testFsPermissionOperation() {
 
-    HashMap<String,Integer> permsCode = new HashMap<String,Integer>();
+        HashMap<String, Integer> permsCode = new HashMap<String, Integer>();
 
-    for (int i = 0; i < 8; i++){
-      for (int j = 0; j < 8; j++){
-        for (int k = 0; k < 8; k++){
-          StringBuilder sb = new StringBuilder();
-          sb.append("0");
-          sb.append(i);
-          sb.append(j);
-          sb.append(k);
-          Integer code = (((i*8)+j)*8)+k;
-          String perms = (new FsPermission(Short.decode(sb.toString()))).toString();
-          if (permsCode.containsKey(perms)){
-            Assert.assertEquals("permissions(" + perms + ") mapped to multiple codes", code, permsCode.get(perms));
-          }
-          permsCode.put(perms, code);
-          assertFsPermissionTransformationIsGood(perms);
+        for (int i = 0; i < 8; i++) {
+            for (int j = 0; j < 8; j++) {
+                for (int k = 0; k < 8; k++) {
+                    StringBuilder sb = new StringBuilder();
+                    sb.append("0");
+                    sb.append(i);
+                    sb.append(j);
+                    sb.append(k);
+                    Integer code = (((i * 8) + j) * 8) + k;
+                    String perms = (new FsPermission(Short.decode(sb.toString()))).toString();
+                    if (permsCode.containsKey(perms)) {
+                        Assert.assertEquals("permissions(" + perms + ") mapped to multiple codes", code, permsCode.get(perms));
+                    }
+                    permsCode.put(perms, code);
+                    assertFsPermissionTransformationIsGood(perms);
+                }
+            }
         }
-      }
     }
-  }
 
-  private void assertFsPermissionTransformationIsGood(String perms) {
-    Assert.assertEquals(perms, FsPermission.valueOf("-" + perms).toString());
-  }
-
-  @Test
-  public void testValidateMorePermissive(){
-    assertConsistentFsPermissionBehaviour(FsAction.ALL,true,true,true,true,true,true,true,true);
-    assertConsistentFsPermissionBehaviour(FsAction.READ,false,true,false,true,false,false,false,false);
-    assertConsistentFsPermissionBehaviour(FsAction.WRITE,false,true,false,false,true,false,false,false);
-    assertConsistentFsPermissionBehaviour(FsAction.EXECUTE,false,true,true,false,false,false,false,false);
-    assertConsistentFsPermissionBehaviour(FsAction.READ_EXECUTE,false,true,true,true,false,true,false,false);
-    assertConsistentFsPermissionBehaviour(FsAction.READ_WRITE,false,true,false,true,true,false,true,false);
-    assertConsistentFsPermissionBehaviour(FsAction.WRITE_EXECUTE,false,true,true,false,true,false,false,true);
-    assertConsistentFsPermissionBehaviour(FsAction.NONE,false,true,false,false,false,false,false,false);
-  }
-
-
-  private void assertConsistentFsPermissionBehaviour(
-      FsAction base, boolean versusAll, boolean versusNone,
-      boolean versusX, boolean versusR, boolean versusW,
-      boolean versusRX, boolean versusRW,  boolean versusWX){
-
-    Assert.assertTrue(versusAll == HCatUtil.validateMorePermissive(base, FsAction.ALL));
-    Assert.assertTrue(versusX == HCatUtil.validateMorePermissive(base, FsAction.EXECUTE));
-    Assert.assertTrue(versusNone == HCatUtil.validateMorePermissive(base, FsAction.NONE));
-    Assert.assertTrue(versusR == HCatUtil.validateMorePermissive(base, FsAction.READ));
-    Assert.assertTrue(versusRX == HCatUtil.validateMorePermissive(base, FsAction.READ_EXECUTE));
-    Assert.assertTrue(versusRW == HCatUtil.validateMorePermissive(base, FsAction.READ_WRITE));
-    Assert.assertTrue(versusW == HCatUtil.validateMorePermissive(base, FsAction.WRITE));
-    Assert.assertTrue(versusWX == HCatUtil.validateMorePermissive(base, FsAction.WRITE_EXECUTE));
-  }
-
-  @Test
-  public void testExecutePermissionsCheck(){
-    Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.ALL));
-    Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.NONE));
-    Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.EXECUTE));
-    Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_EXECUTE));
-    Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE_EXECUTE));
-
-    Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ));
-    Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE));
-    Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_WRITE));
-
-  }
-
-  @Test
-  public void testGetTableSchemaWithPtnColsApi() throws IOException {
-    // Check the schema of a table with one field & no partition keys.
-    StorageDescriptor sd = new StorageDescriptor(
-        Lists.newArrayList(new FieldSchema("username", Constants.STRING_TYPE_NAME, null)),
-        "location", "org.apache.hadoop.mapred.TextInputFormat",
-        "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(),
-        new ArrayList<String>(), new ArrayList<Order>(), new HashMap<String, String>());
-    org.apache.hadoop.hive.metastore.api.Table apiTable =
-        new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
-            0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
-            "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
-    Table table = new Table(apiTable);
-
-    List<HCatFieldSchema> expectedHCatSchema =
-        Lists.newArrayList(new HCatFieldSchema("username", HCatFieldSchema.Type.STRING, null));
-
-    Assert.assertEquals(new HCatSchema(expectedHCatSchema),
-        HCatUtil.getTableSchemaWithPtnCols(table));
-
-    // Add a partition key & ensure its reflected in the schema.
-    List<FieldSchema> partitionKeys =
-        Lists.newArrayList(new FieldSchema("dt", Constants.STRING_TYPE_NAME, null));
-    table.getTTable().setPartitionKeys(partitionKeys);
-    expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
-    Assert.assertEquals(new HCatSchema(expectedHCatSchema),
-        HCatUtil.getTableSchemaWithPtnCols(table));
-  }
-
-  /**
-   * Hive represents tables in two ways:
-   * <ul>
-   *   <li>org.apache.hadoop.hive.metastore.api.Table - exactly whats stored in the metastore</li>
-   *   <li>org.apache.hadoop.hive.ql.metadata.Table - adds business logic over api.Table</li>
-   * </ul>
-   * Here we check SerDe-reported fields are included in the table schema.
-   */
-  @Test
-  public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException {
-    Map<String, String> parameters = Maps.newHashMap();
-    parameters.put(Constants.SERIALIZATION_CLASS,
-        "org.apache.hadoop.hive.serde2.thrift.test.IntString");
-    parameters.put(Constants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
-
-    SerDeInfo serDeInfo = new SerDeInfo(null,
-        "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", parameters);
-
-    // StorageDescriptor has an empty list of fields - SerDe will report them.
-    StorageDescriptor sd = new StorageDescriptor(new ArrayList<FieldSchema>(), "location",
-        "org.apache.hadoop.mapred.TextInputFormat", "org.apache.hadoop.mapred.TextOutputFormat",
-        false, -1, serDeInfo, new ArrayList<String>(), new ArrayList<Order>(),
-        new HashMap<String, String>());
-
-    org.apache.hadoop.hive.metastore.api.Table apiTable =
-        new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
-            0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
-            "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
-    Table table = new Table(apiTable);
-
-    List<HCatFieldSchema> expectedHCatSchema = Lists.newArrayList(
-        new HCatFieldSchema("myint", HCatFieldSchema.Type.INT, null),
-        new HCatFieldSchema("mystring", HCatFieldSchema.Type.STRING, null),
-        new HCatFieldSchema("underscore_int", HCatFieldSchema.Type.INT, null));
-
-    Assert.assertEquals(new HCatSchema(expectedHCatSchema),
-        HCatUtil.getTableSchemaWithPtnCols(table));
-  }
+    private void assertFsPermissionTransformationIsGood(String perms) {
+        Assert.assertEquals(perms, FsPermission.valueOf("-" + perms).toString());
+    }
+
+    @Test
+    public void testValidateMorePermissive() {
+        assertConsistentFsPermissionBehaviour(FsAction.ALL, true, true, true, true, true, true, true, true);
+        assertConsistentFsPermissionBehaviour(FsAction.READ, false, true, false, true, false, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.WRITE, false, true, false, false, true, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.EXECUTE, false, true, true, false, false, false, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.READ_EXECUTE, false, true, true, true, false, true, false, false);
+        assertConsistentFsPermissionBehaviour(FsAction.READ_WRITE, false, true, false, true, true, false, true, false);
+        assertConsistentFsPermissionBehaviour(FsAction.WRITE_EXECUTE, false, true, true, false, true, false, false, true);
+        assertConsistentFsPermissionBehaviour(FsAction.NONE, false, true, false, false, false, false, false, false);
+    }
+
+
+    private void assertConsistentFsPermissionBehaviour(
+            FsAction base, boolean versusAll, boolean versusNone,
+            boolean versusX, boolean versusR, boolean versusW,
+            boolean versusRX, boolean versusRW, boolean versusWX) {
+
+        Assert.assertTrue(versusAll == HCatUtil.validateMorePermissive(base, FsAction.ALL));
+        Assert.assertTrue(versusX == HCatUtil.validateMorePermissive(base, FsAction.EXECUTE));
+        Assert.assertTrue(versusNone == HCatUtil.validateMorePermissive(base, FsAction.NONE));
+        Assert.assertTrue(versusR == HCatUtil.validateMorePermissive(base, FsAction.READ));
+        Assert.assertTrue(versusRX == HCatUtil.validateMorePermissive(base, FsAction.READ_EXECUTE));
+        Assert.assertTrue(versusRW == HCatUtil.validateMorePermissive(base, FsAction.READ_WRITE));
+        Assert.assertTrue(versusW == HCatUtil.validateMorePermissive(base, FsAction.WRITE));
+        Assert.assertTrue(versusWX == HCatUtil.validateMorePermissive(base, FsAction.WRITE_EXECUTE));
+    }
+
+    @Test
+    public void testExecutePermissionsCheck() {
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.ALL));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.NONE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.EXECUTE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_EXECUTE));
+        Assert.assertTrue(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE_EXECUTE));
+
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ));
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.WRITE));
+        Assert.assertFalse(HCatUtil.validateExecuteBitPresentIfReadOrWrite(FsAction.READ_WRITE));
+
+    }
+
+    @Test
+    public void testGetTableSchemaWithPtnColsApi() throws IOException {
+        // Check the schema of a table with one field & no partition keys.
+        StorageDescriptor sd = new StorageDescriptor(
+                Lists.newArrayList(new FieldSchema("username", Constants.STRING_TYPE_NAME, null)),
+                "location", "org.apache.hadoop.mapred.TextInputFormat",
+                "org.apache.hadoop.mapred.TextOutputFormat", false, -1, new SerDeInfo(),
+                new ArrayList<String>(), new ArrayList<Order>(), new HashMap<String, String>());
+        org.apache.hadoop.hive.metastore.api.Table apiTable =
+                new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+                        0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+                        "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+        Table table = new Table(apiTable);
+
+        List<HCatFieldSchema> expectedHCatSchema =
+                Lists.newArrayList(new HCatFieldSchema("username", HCatFieldSchema.Type.STRING, null));
+
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+
+        // Add a partition key &amp; ensure it's reflected in the schema.
+        List<FieldSchema> partitionKeys =
+                Lists.newArrayList(new FieldSchema("dt", Constants.STRING_TYPE_NAME, null));
+        table.getTTable().setPartitionKeys(partitionKeys);
+        expectedHCatSchema.add(new HCatFieldSchema("dt", HCatFieldSchema.Type.STRING, null));
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+    }
+
+    /**
+     * Hive represents tables in two ways:
+     * <ul>
+     *   <li>org.apache.hadoop.hive.metastore.api.Table - exactly what's stored in the metastore</li>
+     *   <li>org.apache.hadoop.hive.ql.metadata.Table - adds business logic over api.Table</li>
+     * </ul>
+     * Here we check SerDe-reported fields are included in the table schema.
+     */
+    @Test
+    public void testGetTableSchemaWithPtnColsSerDeReportedFields() throws IOException {
+        Map<String, String> parameters = Maps.newHashMap();
+        parameters.put(Constants.SERIALIZATION_CLASS,
+                "org.apache.hadoop.hive.serde2.thrift.test.IntString");
+        parameters.put(Constants.SERIALIZATION_FORMAT, "org.apache.thrift.protocol.TBinaryProtocol");
+
+        SerDeInfo serDeInfo = new SerDeInfo(null,
+                "org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer", parameters);
+
+        // StorageDescriptor has an empty list of fields - SerDe will report them.
+        StorageDescriptor sd = new StorageDescriptor(new ArrayList<FieldSchema>(), "location",
+                "org.apache.hadoop.mapred.TextInputFormat", "org.apache.hadoop.mapred.TextOutputFormat",
+                false, -1, serDeInfo, new ArrayList<String>(), new ArrayList<Order>(),
+                new HashMap<String, String>());
+
+        org.apache.hadoop.hive.metastore.api.Table apiTable =
+                new org.apache.hadoop.hive.metastore.api.Table("test_tblname", "test_dbname", "test_owner",
+                        0, 0, 0, sd, new ArrayList<FieldSchema>(), new HashMap<String, String>(),
+                        "viewOriginalText", "viewExpandedText", TableType.EXTERNAL_TABLE.name());
+        Table table = new Table(apiTable);
+
+        List<HCatFieldSchema> expectedHCatSchema = Lists.newArrayList(
+                new HCatFieldSchema("myint", HCatFieldSchema.Type.INT, null),
+                new HCatFieldSchema("mystring", HCatFieldSchema.Type.STRING, null),
+                new HCatFieldSchema("underscore_int", HCatFieldSchema.Type.INT, null));
+
+        Assert.assertEquals(new HCatSchema(expectedHCatSchema),
+                HCatUtil.getTableSchemaWithPtnCols(table));
+    }
 }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHiveClientCache.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHiveClientCache.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHiveClientCache.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/common/TestHiveClientCache.java Mon Sep 10 23:28:55 2012
@@ -34,6 +34,7 @@ import org.apache.hcatalog.cli.SemanticA
 import org.apache.thrift.TException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
+
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -74,13 +75,13 @@ public class TestHiveClientCache {
     public void testCacheHit() throws IOException, MetaException, LoginException {
 
         HiveClientCache cache = new HiveClientCache(1000);
-        HiveMetaStoreClient  client = cache.get(hiveConf);
+        HiveMetaStoreClient client = cache.get(hiveConf);
         assertNotNull(client);
         client.close(); // close shouldn't matter
 
         // Setting a non important configuration should return the same client only
         hiveConf.setIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS, 10);
-        HiveMetaStoreClient  client2 = cache.get(hiveConf);
+        HiveMetaStoreClient client2 = cache.get(hiveConf);
         assertNotNull(client2);
         assertEquals(client, client2);
         client2.close();
@@ -89,12 +90,12 @@ public class TestHiveClientCache {
     @Test
     public void testCacheMiss() throws IOException, MetaException, LoginException {
         HiveClientCache cache = new HiveClientCache(1000);
-        HiveMetaStoreClient  client = cache.get(hiveConf);
+        HiveMetaStoreClient client = cache.get(hiveConf);
         assertNotNull(client);
 
         // Set different uri as it is one of the criteria deciding whether to return the same client or not
         hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " "); // URIs are checked for string equivalence, even spaces make them different
-        HiveMetaStoreClient  client2 = cache.get(hiveConf);
+        HiveMetaStoreClient client2 = cache.get(hiveConf);
         assertNotNull(client2);
         assertNotSame(client, client2);
     }
@@ -106,7 +107,7 @@ public class TestHiveClientCache {
     @Test
     public void testCacheExpiry() throws IOException, MetaException, LoginException, InterruptedException {
         HiveClientCache cache = new HiveClientCache(1);
-        HiveClientCache.CacheableHiveMetaStoreClient client = (HiveClientCache.CacheableHiveMetaStoreClient)cache.get(hiveConf);
+        HiveClientCache.CacheableHiveMetaStoreClient client = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
         assertNotNull(client);
 
         Thread.sleep(2500);
@@ -165,9 +166,9 @@ public class TestHiveClientCache {
      */
     @Test
     public void testHMSCBreakability() throws IOException, MetaException, LoginException, TException, AlreadyExistsException,
-        InvalidObjectException, NoSuchObjectException, InterruptedException {
+            InvalidObjectException, NoSuchObjectException, InterruptedException {
         // Setup
-        LocalMetaServer metaServer =  new LocalMetaServer();
+        LocalMetaServer metaServer = new LocalMetaServer();
         metaServer.start();
 
         final HiveClientCache cache = new HiveClientCache(1000);
@@ -253,6 +254,7 @@ public class TestHiveClientCache {
         public HiveConf getHiveConf() {
             return hiveConf;
         }
+
         public void shutDown() {
             System.setSecurityManager(securityManager);
         }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/HCatDataCheckUtil.java Mon Sep 10 23:28:55 2012
@@ -36,78 +36,78 @@ import org.slf4j.LoggerFactory;
  */
 public class HCatDataCheckUtil {
 
-  private static final Logger LOG = LoggerFactory.getLogger(HCatDataCheckUtil.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HCatDataCheckUtil.class);
 
-  public static Driver instantiateDriver(MiniCluster cluster) {
-    HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
-    for (Entry e : cluster.getProperties().entrySet()){
-      hiveConf.set(e.getKey().toString(), e.getValue().toString());
-    }
-    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-
-    LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
-    Driver driver = new Driver(hiveConf);
-    SessionState.start(new CliSessionState(hiveConf));
-    return driver;
-  }
-
-  public static void generateDataFile(MiniCluster cluster, String fileName) throws IOException {
-    MiniCluster.deleteFile(cluster, fileName);
-    String[] input = new String[50];
-    for(int i = 0; i < 50; i++) {
-      input[i] = (i % 5) + "\t" + i  + "\t" + "_S" + i + "S_";
-    }
-    MiniCluster.createInputFile(cluster, fileName, input);
-  }
-
-  public static void createTable(Driver driver, String tableName, String createTableArgs)
-      throws CommandNeedRetryException, IOException {
-    String createTable = "create table " + tableName + createTableArgs;
-    int retCode = driver.run(createTable).getResponseCode();
-    if(retCode != 0) {
-      throw new IOException("Failed to create table. ["+createTable+"], return code from hive driver : ["+retCode+"]");
-    }
-  }
-
-  public static void dropTable(Driver driver, String tablename) throws IOException, CommandNeedRetryException{
-    driver.run("drop table if exists "+tablename);
-  }
-
-  public static ArrayList<String> formattedRun(Driver driver, String name, String selectCmd)
-      throws CommandNeedRetryException, IOException {
-    driver.run(selectCmd);
-    ArrayList<String> src_values = new ArrayList<String>();
-    driver.getResults(src_values);
-    LOG.info("{} : {}", name, src_values);
-    return src_values;
-  }
-
-
-  public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
-    return (compareRecords(first,second) == 0);
-  }
-
-  public static int compareRecords(HCatRecord first, HCatRecord second) {
-    return compareRecordContents(first.getAll(), second.getAll());
-  }
-
-  public static int compareRecordContents(List<Object> first, List<Object> second) {
-    int mySz = first.size();
-    int urSz = second.size();
-    if(mySz != urSz) {
-      return mySz - urSz;
-    } else {
-      for (int i = 0; i < first.size(); i++) {
-        int c = DataType.compare(first.get(i), second.get(i));
-        if (c != 0) {
-          return c;
+    public static Driver instantiateDriver(MiniCluster cluster) {
+        HiveConf hiveConf = new HiveConf(HCatDataCheckUtil.class);
+        for (Entry e : cluster.getProperties().entrySet()) {
+            hiveConf.set(e.getKey().toString(), e.getValue().toString());
+        }
+        hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+        hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        LOG.debug("Hive conf : {}", hiveConf.getAllProperties());
+        Driver driver = new Driver(hiveConf);
+        SessionState.start(new CliSessionState(hiveConf));
+        return driver;
+    }
+
+    public static void generateDataFile(MiniCluster cluster, String fileName) throws IOException {
+        MiniCluster.deleteFile(cluster, fileName);
+        String[] input = new String[50];
+        for (int i = 0; i < 50; i++) {
+            input[i] = (i % 5) + "\t" + i + "\t" + "_S" + i + "S_";
+        }
+        MiniCluster.createInputFile(cluster, fileName, input);
+    }
+
+    public static void createTable(Driver driver, String tableName, String createTableArgs)
+        throws CommandNeedRetryException, IOException {
+        String createTable = "create table " + tableName + createTableArgs;
+        int retCode = driver.run(createTable).getResponseCode();
+        if (retCode != 0) {
+            throw new IOException("Failed to create table. [" + createTable + "], return code from hive driver : [" + retCode + "]");
+        }
+    }
+
+    public static void dropTable(Driver driver, String tablename) throws IOException, CommandNeedRetryException {
+        driver.run("drop table if exists " + tablename);
+    }
+
+    public static ArrayList<String> formattedRun(Driver driver, String name, String selectCmd)
+        throws CommandNeedRetryException, IOException {
+        driver.run(selectCmd);
+        ArrayList<String> src_values = new ArrayList<String>();
+        driver.getResults(src_values);
+        LOG.info("{} : {}", name, src_values);
+        return src_values;
+    }
+
+
+    public static boolean recordsEqual(HCatRecord first, HCatRecord second) {
+        return (compareRecords(first, second) == 0);
+    }
+
+    public static int compareRecords(HCatRecord first, HCatRecord second) {
+        return compareRecordContents(first.getAll(), second.getAll());
+    }
+
+    public static int compareRecordContents(List<Object> first, List<Object> second) {
+        int mySz = first.size();
+        int urSz = second.size();
+        if (mySz != urSz) {
+            return mySz - urSz;
+        } else {
+            for (int i = 0; i < first.size(); i++) {
+                int c = DataType.compare(first.get(i), second.get(i));
+                if (c != 0) {
+                    return c;
+                }
+            }
+            return 0;
         }
-      }
-      return 0;
     }
-  }
 
 
 }



Mime
View raw message