incubator-hcatalog-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From tra...@apache.org
Subject svn commit: r1383152 [20/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java Mon Sep 10 23:28:55 2012
@@ -52,531 +52,531 @@ import org.junit.Before;
 import org.junit.Test;
 
 public class TestHdfsAuthorizationProvider {
-  
-  protected HCatDriver hcatDriver;
-  protected HiveMetaStoreClient msc;
-  protected HiveConf conf;
-  protected String whDir;
-  protected Path whPath;
-  protected FileSystem whFs;
-  protected Warehouse wh;
-  protected Hive hive;
-
-  @Before
-  public void setUp() throws Exception {
-    
-    conf = new HiveConf(this.getClass());
-    conf.set(ConfVars.PREEXECHOOKS.varname, "");
-    conf.set(ConfVars.POSTEXECHOOKS.varname, "");
-    conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
-  
-    conf.set("hive.metastore.local", "true");
-    conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
-    conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
-    conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER, 
-        StorageDelegationAuthorizationProvider.class.getCanonicalName());
-    conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
-    
-    whDir = System.getProperty("test.warehouse.dir", "/tmp/testhdfsauthorization_wh");
-    conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, whDir);
-    
-    UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
-    String username = ShimLoader.getHadoopShims().getShortUserName(ugi); 
-    
-    whPath = new Path(whDir);
-    whFs = whPath.getFileSystem(conf);
-    
-    wh = new Warehouse(conf);
-    hive = Hive.get(conf);
-    
-    //clean up mess in HMS 
-    HcatTestUtils.cleanupHMS(hive, wh, perm700);
-    
-    whFs.delete(whPath, true);
-    whFs.mkdirs(whPath, perm755);
-    
-    SessionState.start(new CliSessionState(conf));
-    hcatDriver = new HCatDriver();
-  }
-
-  @After
-  public void tearDown() throws IOException {
-    whFs.close();
-    hcatDriver.close();
-    Hive.closeCurrent();
-  }
-
-  public Path getDbPath(String dbName) throws MetaException, HiveException {
-    return HcatTestUtils.getDbPath(hive, wh, dbName); 
-  }
-  
-  public Path getTablePath(String dbName, String tableName) throws HiveException {
-    Table table = hive.getTable(dbName, tableName);
-    return table.getPath();
-  }
-
-  public Path getPartPath(String partName, String dbName, String tableName) throws HiveException {
-    return new Path(getTablePath(dbName, tableName), partName);
-  }
-
-  /** Execute the query expecting success*/
-  public void exec(String format, Object ... args) throws Exception {
-    String command = String.format(format, args);
-    CommandProcessorResponse resp = hcatDriver.run(command);
-    Assert.assertEquals(resp.getErrorMessage(), 0, resp.getResponseCode());
-    Assert.assertEquals(resp.getErrorMessage(), null, resp.getErrorMessage());
-  }
-
-  /** Execute the query expecting it to fail with AuthorizationException */
-  public void execFail(String format, Object ... args) throws Exception {
-    String command = String.format(format, args);
-    CommandProcessorResponse resp = hcatDriver.run(command);
-    Assert.assertNotSame(resp.getErrorMessage(), 0, resp.getResponseCode());
-    Assert.assertTrue((resp.getResponseCode() == 40000) || (resp.getResponseCode() == 403));
-    if(resp.getErrorMessage() != null){
-     Assert.assertTrue(resp.getErrorMessage().contains("org.apache.hadoop.security.AccessControlException"));
-    }
-  }
-
-  
-  /** 
-   * Tests whether the warehouse directory is writable by the current user (as defined by Hadoop)
-   */
-  @Test
-  public void testWarehouseIsWritable() throws Exception {
-    Path top = new Path(whPath, "_foobarbaz12_");
-    try {
-      whFs.mkdirs(top);
-    } finally {
-      whFs.delete(top, true);
-    }
-  }
-  
-  @Test
-  public void testShowDatabases() throws Exception {
-    exec("CREATE DATABASE doo");
-    exec("SHOW DATABASES");
-    
-    whFs.setPermission(whPath, perm300); //revoke r
-    execFail("SHOW DATABASES");
-  }
-  
-  @Test
-  public void testDatabaseOps() throws Exception {
-    exec("SHOW TABLES");
-    exec("SHOW TABLE EXTENDED LIKE foo1");
-    
-    whFs.setPermission(whPath, perm700);
-    exec("CREATE DATABASE doo");
-    exec("DESCRIBE DATABASE doo");
-    exec("USE doo");
-    exec("SHOW TABLES");
-    exec("SHOW TABLE EXTENDED LIKE foo1");
-    exec("DROP DATABASE doo");
-    
-    //custom location
-    Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
-    whFs.mkdirs(dbPath, perm700);
-    exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
-    exec("DESCRIBE DATABASE doo2", dbPath.toUri());
-    exec("USE doo2");
-    exec("SHOW TABLES");
-    exec("SHOW TABLE EXTENDED LIKE foo1");
-    exec("DROP DATABASE doo2", dbPath.toUri());
-    
-    //custom non-existing location
-    exec("CREATE DATABASE doo3 LOCATION '%s/subpath'", dbPath.toUri());
-  }
-  
-  @Test
-  public void testCreateDatabaseFail1() throws Exception {
-    whFs.setPermission(whPath, perm500);
-    execFail("CREATE DATABASE doo"); //in the default location
-    
-    whFs.setPermission(whPath, perm555);
-    execFail("CREATE DATABASE doo2");
-  }
-
-  @Test
-  public void testCreateDatabaseFail2() throws Exception {
-    //custom location
-    Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
-    
-    whFs.mkdirs(dbPath, perm700);
-    whFs.setPermission(dbPath, perm500);
-    execFail("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
-  }
-  
-  @Test
-  public void testDropDatabaseFail1() throws Exception {
-    whFs.setPermission(whPath, perm700);
-    exec("CREATE DATABASE doo"); //in the default location
-    
-    whFs.setPermission(getDbPath("doo"), perm500); //revoke write
-    execFail("DROP DATABASE doo");
-  }
-  
-  @Test
-  public void testDropDatabaseFail2() throws Exception {
-    //custom location
-    Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
-    
-    whFs.mkdirs(dbPath, perm700);
-    exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
-    
-    whFs.setPermission(dbPath, perm500);
-    execFail("DROP DATABASE doo2");
-  }
-  
-  @Test
-  public void testDescSwitchDatabaseFail() throws Exception {
-    whFs.setPermission(whPath, perm700);
-    exec("CREATE DATABASE doo");
-    whFs.setPermission(getDbPath("doo"), perm300); //revoke read
-    execFail("DESCRIBE DATABASE doo");
-    execFail("USE doo");
-    
-    //custom location
-    Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
-    whFs.mkdirs(dbPath, perm700);
-    exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
-    whFs.mkdirs(dbPath, perm300); //revoke read
-    execFail("DESCRIBE DATABASE doo2", dbPath.toUri());
-    execFail("USE doo2");
-  }
-  
-  @Test 
-  public void testShowTablesFail() throws Exception {
-    whFs.setPermission(whPath, perm700);
-    exec("CREATE DATABASE doo");
-    exec("USE doo");
-    whFs.setPermission(getDbPath("doo"), perm300); //revoke read
-    execFail("SHOW TABLES");
-    execFail("SHOW TABLE EXTENDED LIKE foo1");
-  }
-  
-  @Test
-  public void testTableOps() throws Exception {
-    //default db
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    exec("DESCRIBE foo1");
-    exec("DROP TABLE foo1");
-    
-    //default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm700);
-    exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    exec("DESCRIBE foo2");
-    exec("DROP TABLE foo2");
-    
-    //default db custom non existing location
-    exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
-    exec("DESCRIBE foo3");
-    exec("DROP TABLE foo3");
-    
-    //non default db
-    exec("CREATE DATABASE doo");
-    exec("USE doo");
-    
-    exec("CREATE TABLE foo4 (foo INT) STORED AS RCFILE");
-    exec("DESCRIBE foo4");
-    exec("DROP TABLE foo4");
-    
-    //non-default db custom location
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm700);
-    exec("CREATE EXTERNAL TABLE foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    exec("DESCRIBE foo5");
-    exec("DROP TABLE foo5");
-    
-    //non-default db custom non existing location
-    exec("CREATE EXTERNAL TABLE foo6 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
-    exec("DESCRIBE foo6");
-    exec("DROP TABLE foo6");
-    
-    exec("DROP TABLE IF EXISTS foo_non_exists");
-    
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    exec("DESCRIBE EXTENDED foo1");
-    exec("DESCRIBE FORMATTED foo1");
-    exec("DESCRIBE foo1.foo");
-    
-    //deep non-existing path for the table
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm700);
-    exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s/a/a/a/'", tablePath);
-  }
-  
-  @Test
-  public void testCreateTableFail1() throws Exception {
-    //default db
-    whFs.mkdirs(whPath, perm500); //revoke w
-    execFail("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-  }
-  
-  @Test
-  public void testCreateTableFail2() throws Exception {
-    //default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm500);
-    execFail("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    
-    //default db custom non existing location
-    execFail("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
-  }
-  
-  @Test
-  public void testCreateTableFail3() throws Exception {
-    //non default db
-    exec("CREATE DATABASE doo");
-    whFs.setPermission(getDbPath("doo"), perm500);
-
-    execFail("CREATE TABLE doo.foo4 (foo INT) STORED AS RCFILE");
-    
-    //non-default db custom location, permission to write to tablePath, but not on db path
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm700);
-    exec("USE doo");
-    execFail("CREATE EXTERNAL TABLE foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-  }
-
-  @Test
-  public void testCreateTableFail4() throws Exception {
-    //non default db
-    exec("CREATE DATABASE doo");
-
-    //non-default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm500);
-    execFail("CREATE EXTERNAL TABLE doo.foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    
-    //non-default db custom non existing location
-    execFail("CREATE EXTERNAL TABLE doo.foo6 (foo INT) STORED AS RCFILE LOCATION '%s/a/a/a/'", tablePath);
-  }
-  
-  @Test
-  public void testDropTableFail1() throws Exception {
-    //default db
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke w
-    execFail("DROP TABLE foo1");
-  }
-  
-  @Test
-  public void testDropTableFail2() throws Exception {
-    //default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    whFs.mkdirs(tablePath, perm500);
-    execFail("DROP TABLE foo2");
-  }
-
-  @Test
-  public void testDropTableFail4() throws Exception {
-    //non default db
-    exec("CREATE DATABASE doo");
-
-    //non-default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    
-    exec("CREATE EXTERNAL TABLE doo.foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    whFs.mkdirs(tablePath, perm500);
-    exec("USE doo"); //There is no DROP TABLE doo.foo5 support in Hive
-    execFail("DROP TABLE foo5");
-  }
-  
-  @Test
-  public void testDescTableFail() throws Exception {
-    //default db
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    whFs.mkdirs(getTablePath("default", "foo1"), perm300); //revoke read
-    execFail("DESCRIBE foo1");
-    
-    //default db custom location
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm700);
-    exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    whFs.mkdirs(tablePath, perm300); //revoke read
-    execFail("DESCRIBE foo2");
-  }
-  
-  @Test
-  public void testAlterTableRename() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    exec("ALTER TABLE foo1 RENAME TO foo2");
-    
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    exec("ALTER TABLE foo3 RENAME TO foo4");
-  }
-  
-  @Test
-  public void testAlterTableRenameFail() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke write
-    execFail("ALTER TABLE foo1 RENAME TO foo2");
-    
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
-    whFs.mkdirs(tablePath, perm500); //revoke write 
-    execFail("ALTER TABLE foo3 RENAME TO foo4");
-  }
-  
-  @Test
-  public void testAlterTableRelocate() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    exec("ALTER TABLE foo1 SET LOCATION '%s'", tablePath.makeQualified(whFs));
-    
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
-    exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", 
-        tablePath.makeQualified(whFs));
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
-    exec("ALTER TABLE foo3 SET LOCATION '%s'", tablePath.makeQualified(whFs));
-  }
-  
-  @Test
-  public void testAlterTableRelocateFail() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
-    Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
-    whFs.mkdirs(tablePath, perm500); //revoke write
-    execFail("ALTER TABLE foo1 SET LOCATION '%s'", tablePath.makeQualified(whFs));
-    
-    //dont have access to new table loc
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
-    exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", 
-        tablePath.makeQualified(whFs));
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
-    whFs.mkdirs(tablePath, perm500); //revoke write
-    execFail("ALTER TABLE foo3 SET LOCATION '%s'", tablePath.makeQualified(whFs));
-    
-    //have access to new table loc, but not old table loc 
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable3");
-    exec("CREATE EXTERNAL TABLE foo4 (foo INT) STORED AS RCFILE LOCATION '%s'", 
-        tablePath.makeQualified(whFs));
-    whFs.mkdirs(tablePath, perm500); //revoke write
-    tablePath = new Path(whPath, new Random().nextInt() + "/mytable3");
-    execFail("ALTER TABLE foo4 SET LOCATION '%s'", tablePath.makeQualified(whFs));
-  }
-  
-  @Test
-  public void testAlterTable() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    exec("ALTER TABLE foo1 SET TBLPROPERTIES ('foo'='bar')");
-    exec("ALTER TABLE foo1 SET SERDEPROPERTIES ('foo'='bar')");
-    exec("ALTER TABLE foo1 ADD COLUMNS (foo2 INT)");
-  }
-  
-  @Test
-  public void testAddDropPartition() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
-    exec("ALTER TABLE foo1 ADD IF NOT EXISTS PARTITION (b='2010-10-10')");
-    String relPath = new Random().nextInt() + "/mypart";
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-11') LOCATION '%s'", relPath);
-    
-    exec("ALTER TABLE foo1 PARTITION (b='2010-10-10') SET FILEFORMAT RCFILE");
-    
-    exec("ALTER TABLE foo1 PARTITION (b='2010-10-10') SET FILEFORMAT INPUTFORMAT "
-        + "'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT "
-        + "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver "
-        + "'mydriver' outputdriver 'yourdriver'");    
-    
-    exec("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
-    exec("ALTER TABLE foo1 DROP PARTITION (b='2010-10-11')");
-  }
-  
-  @Test
-  public void testAddPartitionFail1() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    whFs.mkdirs(getTablePath("default", "foo1"), perm500);
-    execFail("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
-  }
-  
-  @Test
-  public void testAddPartitionFail2() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    String relPath = new Random().nextInt() + "/mypart";
-    Path partPath = new Path(getTablePath("default", "foo1"), relPath);
-    whFs.mkdirs(partPath, perm500);
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
-  }
-  
-  @Test
-  public void testDropPartitionFail1() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
-    whFs.mkdirs(getPartPath("b=2010-10-10", "default", "foo1"), perm500);
-    execFail("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
-  }
-
-  @Test
-  public void testDropPartitionFail2() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
-    String relPath = new Random().nextInt() + "/mypart";
-    Path partPath = new Path(getTablePath("default", "foo1"), relPath);
-    whFs.mkdirs(partPath, perm700);
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
-    whFs.mkdirs(partPath, perm500); //revoke write
-    execFail("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
-  }
-  
-  @Test
-  public void testAlterTableFail() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (boo STRING) STORED AS TEXTFILE");
-    whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke write
-    execFail("ALTER TABLE foo1 SET TBLPROPERTIES ('foo'='bar')");
-    execFail("ALTER TABLE foo1 SET SERDEPROPERTIES ('foo'='bar')");
-    execFail("ALTER TABLE foo1 ADD COLUMNS (foo2 INT)");
-  }
-  
-  @Test
-  public void testShowTables() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (boo STRING) STORED AS TEXTFILE");
-    exec("SHOW PARTITIONS foo1");
-    
-    whFs.mkdirs(getTablePath("default", "foo1"), perm300); //revoke read
-    execFail("SHOW PARTITIONS foo1");
-  }
-  
-  @Test
-  public void testAlterTablePartRename() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
-    Path loc = new Path(whPath, new Random().nextInt() + "/mypart");
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", loc);
-    exec("ALTER TABLE foo1 PARTITION (b='2010-10-16') RENAME TO PARTITION (b='2010-10-17')");
-  }
-  
-  @Test
-  public void testAlterTablePartRenameFail() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
-    Path loc = new Path(whPath, new Random().nextInt() + "/mypart");
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", loc);
-    whFs.setPermission(loc, perm500); //revoke w
-    execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') RENAME TO PARTITION (b='2010-10-17')");
-  }
-  
-  @Test
-  public void testAlterTablePartRelocate() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16')");
-    Path partPath = new Path(whPath, new Random().nextInt() + "/mypart");
-    exec("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", partPath.makeQualified(whFs));
-  }
-
-  @Test
-  public void testAlterTablePartRelocateFail() throws Exception {
-    exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
-    
-    Path oldLoc = new Path(whPath, new Random().nextInt() + "/mypart");
-    Path newLoc = new Path(whPath, new Random().nextInt() + "/mypart2");
-    
-    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", oldLoc);
-    whFs.mkdirs(oldLoc, perm500);
-    execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", newLoc.makeQualified(whFs));
-    whFs.mkdirs(oldLoc, perm700);
-    whFs.mkdirs(newLoc, perm500);
-    execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", newLoc.makeQualified(whFs));
-  }
-  
+
+    protected HCatDriver hcatDriver;
+    protected HiveMetaStoreClient msc;
+    protected HiveConf conf;
+    protected String whDir;
+    protected Path whPath;
+    protected FileSystem whFs;
+    protected Warehouse wh;
+    protected Hive hive;
+
+    @Before
+    public void setUp() throws Exception {
+
+        conf = new HiveConf(this.getClass());
+        conf.set(ConfVars.PREEXECHOOKS.varname, "");
+        conf.set(ConfVars.POSTEXECHOOKS.varname, "");
+        conf.set(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+
+        conf.set("hive.metastore.local", "true");
+        conf.set(ConfVars.SEMANTIC_ANALYZER_HOOK.varname, HCatSemanticAnalyzer.class.getName());
+        conf.setBoolVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED, true);
+        conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
+                StorageDelegationAuthorizationProvider.class.getCanonicalName());
+        conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
+
+        whDir = System.getProperty("test.warehouse.dir", "/tmp/testhdfsauthorization_wh");
+        conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, whDir);
+
+        UserGroupInformation ugi = ShimLoader.getHadoopShims().getUGIForConf(conf);
+        String username = ShimLoader.getHadoopShims().getShortUserName(ugi);
+
+        whPath = new Path(whDir);
+        whFs = whPath.getFileSystem(conf);
+
+        wh = new Warehouse(conf);
+        hive = Hive.get(conf);
+
+        //clean up mess in HMS
+        HcatTestUtils.cleanupHMS(hive, wh, perm700);
+
+        whFs.delete(whPath, true);
+        whFs.mkdirs(whPath, perm755);
+
+        SessionState.start(new CliSessionState(conf));
+        hcatDriver = new HCatDriver();
+    }
+
+    @After
+    public void tearDown() throws IOException {
+        whFs.close();
+        hcatDriver.close();
+        Hive.closeCurrent();
+    }
+
+    public Path getDbPath(String dbName) throws MetaException, HiveException {
+        return HcatTestUtils.getDbPath(hive, wh, dbName);
+    }
+
+    public Path getTablePath(String dbName, String tableName) throws HiveException {
+        Table table = hive.getTable(dbName, tableName);
+        return table.getPath();
+    }
+
+    public Path getPartPath(String partName, String dbName, String tableName) throws HiveException {
+        return new Path(getTablePath(dbName, tableName), partName);
+    }
+
+    /** Execute the query expecting success*/
+    public void exec(String format, Object... args) throws Exception {
+        String command = String.format(format, args);
+        CommandProcessorResponse resp = hcatDriver.run(command);
+        Assert.assertEquals(resp.getErrorMessage(), 0, resp.getResponseCode());
+        Assert.assertEquals(resp.getErrorMessage(), null, resp.getErrorMessage());
+    }
+
+    /** Execute the query expecting it to fail with AuthorizationException */
+    public void execFail(String format, Object... args) throws Exception {
+        String command = String.format(format, args);
+        CommandProcessorResponse resp = hcatDriver.run(command);
+        Assert.assertNotSame(resp.getErrorMessage(), 0, resp.getResponseCode());
+        Assert.assertTrue((resp.getResponseCode() == 40000) || (resp.getResponseCode() == 403));
+        if (resp.getErrorMessage() != null) {
+            Assert.assertTrue(resp.getErrorMessage().contains("org.apache.hadoop.security.AccessControlException"));
+        }
+    }
+
+
+    /**
+     * Tests whether the warehouse directory is writable by the current user (as defined by Hadoop)
+     */
+    @Test
+    public void testWarehouseIsWritable() throws Exception {
+        Path top = new Path(whPath, "_foobarbaz12_");
+        try {
+            whFs.mkdirs(top);
+        } finally {
+            whFs.delete(top, true);
+        }
+    }
+
+    @Test
+    public void testShowDatabases() throws Exception {
+        exec("CREATE DATABASE doo");
+        exec("SHOW DATABASES");
+
+        whFs.setPermission(whPath, perm300); //revoke r
+        execFail("SHOW DATABASES");
+    }
+
+    @Test
+    public void testDatabaseOps() throws Exception {
+        exec("SHOW TABLES");
+        exec("SHOW TABLE EXTENDED LIKE foo1");
+
+        whFs.setPermission(whPath, perm700);
+        exec("CREATE DATABASE doo");
+        exec("DESCRIBE DATABASE doo");
+        exec("USE doo");
+        exec("SHOW TABLES");
+        exec("SHOW TABLE EXTENDED LIKE foo1");
+        exec("DROP DATABASE doo");
+
+        //custom location
+        Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
+        whFs.mkdirs(dbPath, perm700);
+        exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
+        exec("DESCRIBE DATABASE doo2", dbPath.toUri());
+        exec("USE doo2");
+        exec("SHOW TABLES");
+        exec("SHOW TABLE EXTENDED LIKE foo1");
+        exec("DROP DATABASE doo2", dbPath.toUri());
+
+        //custom non-existing location
+        exec("CREATE DATABASE doo3 LOCATION '%s/subpath'", dbPath.toUri());
+    }
+
+    @Test
+    public void testCreateDatabaseFail1() throws Exception {
+        whFs.setPermission(whPath, perm500);
+        execFail("CREATE DATABASE doo"); //in the default location
+
+        whFs.setPermission(whPath, perm555);
+        execFail("CREATE DATABASE doo2");
+    }
+
+    @Test
+    public void testCreateDatabaseFail2() throws Exception {
+        //custom location
+        Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
+
+        whFs.mkdirs(dbPath, perm700);
+        whFs.setPermission(dbPath, perm500);
+        execFail("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
+    }
+
+    @Test
+    public void testDropDatabaseFail1() throws Exception {
+        whFs.setPermission(whPath, perm700);
+        exec("CREATE DATABASE doo"); //in the default location
+
+        whFs.setPermission(getDbPath("doo"), perm500); //revoke write
+        execFail("DROP DATABASE doo");
+    }
+
+    @Test
+    public void testDropDatabaseFail2() throws Exception {
+        //custom location
+        Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
+
+        whFs.mkdirs(dbPath, perm700);
+        exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
+
+        whFs.setPermission(dbPath, perm500);
+        execFail("DROP DATABASE doo2");
+    }
+
+    @Test
+    public void testDescSwitchDatabaseFail() throws Exception {
+        whFs.setPermission(whPath, perm700);
+        exec("CREATE DATABASE doo");
+        whFs.setPermission(getDbPath("doo"), perm300); //revoke read
+        execFail("DESCRIBE DATABASE doo");
+        execFail("USE doo");
+
+        //custom location
+        Path dbPath = new Path(whPath, new Random().nextInt() + "/mydb");
+        whFs.mkdirs(dbPath, perm700);
+        exec("CREATE DATABASE doo2 LOCATION '%s'", dbPath.toUri());
+        whFs.mkdirs(dbPath, perm300); //revoke read
+        execFail("DESCRIBE DATABASE doo2", dbPath.toUri());
+        execFail("USE doo2");
+    }
+
+    @Test
+    public void testShowTablesFail() throws Exception {
+        whFs.setPermission(whPath, perm700);
+        exec("CREATE DATABASE doo");
+        exec("USE doo");
+        whFs.setPermission(getDbPath("doo"), perm300); //revoke read
+        execFail("SHOW TABLES");
+        execFail("SHOW TABLE EXTENDED LIKE foo1");
+    }
+
+    @Test
+    public void testTableOps() throws Exception {
+        //default db
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        exec("DESCRIBE foo1");
+        exec("DROP TABLE foo1");
+
+        //default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm700);
+        exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        exec("DESCRIBE foo2");
+        exec("DROP TABLE foo2");
+
+        //default db custom non existing location
+        exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
+        exec("DESCRIBE foo3");
+        exec("DROP TABLE foo3");
+
+        //non default db
+        exec("CREATE DATABASE doo");
+        exec("USE doo");
+
+        exec("CREATE TABLE foo4 (foo INT) STORED AS RCFILE");
+        exec("DESCRIBE foo4");
+        exec("DROP TABLE foo4");
+
+        //non-default db custom location
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm700);
+        exec("CREATE EXTERNAL TABLE foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        exec("DESCRIBE foo5");
+        exec("DROP TABLE foo5");
+
+        //non-default db custom non existing location
+        exec("CREATE EXTERNAL TABLE foo6 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
+        exec("DESCRIBE foo6");
+        exec("DROP TABLE foo6");
+
+        exec("DROP TABLE IF EXISTS foo_non_exists");
+
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        exec("DESCRIBE EXTENDED foo1");
+        exec("DESCRIBE FORMATTED foo1");
+        exec("DESCRIBE foo1.foo");
+
+        //deep non-existing path for the table
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm700);
+        exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s/a/a/a/'", tablePath);
+    }
+
+    @Test
+    public void testCreateTableFail1() throws Exception {
+        //default db
+        whFs.mkdirs(whPath, perm500); //revoke w
+        execFail("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+    }
+
+    @Test
+    public void testCreateTableFail2() throws Exception {
+        //default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm500);
+        execFail("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+
+        //default db custom non existing location
+        execFail("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s/subpath'", tablePath);
+    }
+
+    @Test
+    public void testCreateTableFail3() throws Exception {
+        //non default db
+        exec("CREATE DATABASE doo");
+        whFs.setPermission(getDbPath("doo"), perm500);
+
+        execFail("CREATE TABLE doo.foo4 (foo INT) STORED AS RCFILE");
+
+        //non-default db custom location, permission to write to tablePath, but not on db path
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm700);
+        exec("USE doo");
+        execFail("CREATE EXTERNAL TABLE foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+    }
+
+    @Test
+    public void testCreateTableFail4() throws Exception {
+        //non default db
+        exec("CREATE DATABASE doo");
+
+        //non-default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm500);
+        execFail("CREATE EXTERNAL TABLE doo.foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+
+        //non-default db custom non existing location
+        execFail("CREATE EXTERNAL TABLE doo.foo6 (foo INT) STORED AS RCFILE LOCATION '%s/a/a/a/'", tablePath);
+    }
+
+    @Test
+    public void testDropTableFail1() throws Exception {
+        //default db
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke w
+        execFail("DROP TABLE foo1");
+    }
+
+    @Test
+    public void testDropTableFail2() throws Exception {
+        //default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        whFs.mkdirs(tablePath, perm500);
+        execFail("DROP TABLE foo2");
+    }
+
+    @Test
+    public void testDropTableFail4() throws Exception {
+        //non default db
+        exec("CREATE DATABASE doo");
+
+        //non-default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+
+        exec("CREATE EXTERNAL TABLE doo.foo5 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        whFs.mkdirs(tablePath, perm500);
+        exec("USE doo"); //There is no DROP TABLE doo.foo5 support in Hive
+        execFail("DROP TABLE foo5");
+    }
+
+    @Test
+    public void testDescTableFail() throws Exception {
+        //default db
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        whFs.mkdirs(getTablePath("default", "foo1"), perm300); //revoke read
+        execFail("DESCRIBE foo1");
+
+        //default db custom location
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm700);
+        exec("CREATE EXTERNAL TABLE foo2 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        whFs.mkdirs(tablePath, perm300); //revoke read
+        execFail("DESCRIBE foo2");
+    }
+
+    @Test
+    public void testAlterTableRename() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        exec("ALTER TABLE foo1 RENAME TO foo2");
+
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        exec("ALTER TABLE foo3 RENAME TO foo4");
+    }
+
+    @Test
+    public void testAlterTableRenameFail() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke write
+        execFail("ALTER TABLE foo1 RENAME TO foo2");
+
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'", tablePath);
+        whFs.mkdirs(tablePath, perm500); //revoke write
+        execFail("ALTER TABLE foo3 RENAME TO foo4");
+    }
+
+    @Test
+    public void testAlterTableRelocate() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        exec("ALTER TABLE foo1 SET LOCATION '%s'", tablePath.makeQualified(whFs));
+
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
+        exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'",
+                tablePath.makeQualified(whFs));
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
+        exec("ALTER TABLE foo3 SET LOCATION '%s'", tablePath.makeQualified(whFs));
+    }
+
+    @Test
+    public void testAlterTableRelocateFail() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) STORED AS RCFILE");
+        Path tablePath = new Path(whPath, new Random().nextInt() + "/mytable");
+        whFs.mkdirs(tablePath, perm500); //revoke write
+        execFail("ALTER TABLE foo1 SET LOCATION '%s'", tablePath.makeQualified(whFs));
+
+        //dont have access to new table loc
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
+        exec("CREATE EXTERNAL TABLE foo3 (foo INT) STORED AS RCFILE LOCATION '%s'",
+                tablePath.makeQualified(whFs));
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable2");
+        whFs.mkdirs(tablePath, perm500); //revoke write
+        execFail("ALTER TABLE foo3 SET LOCATION '%s'", tablePath.makeQualified(whFs));
+
+        //have access to new table loc, but not old table loc
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable3");
+        exec("CREATE EXTERNAL TABLE foo4 (foo INT) STORED AS RCFILE LOCATION '%s'",
+                tablePath.makeQualified(whFs));
+        whFs.mkdirs(tablePath, perm500); //revoke write
+        tablePath = new Path(whPath, new Random().nextInt() + "/mytable3");
+        execFail("ALTER TABLE foo4 SET LOCATION '%s'", tablePath.makeQualified(whFs));
+    }
+
+    @Test
+    public void testAlterTable() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        exec("ALTER TABLE foo1 SET TBLPROPERTIES ('foo'='bar')");
+        exec("ALTER TABLE foo1 SET SERDEPROPERTIES ('foo'='bar')");
+        exec("ALTER TABLE foo1 ADD COLUMNS (foo2 INT)");
+    }
+
+    @Test
+    public void testAddDropPartition() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
+        exec("ALTER TABLE foo1 ADD IF NOT EXISTS PARTITION (b='2010-10-10')");
+        String relPath = new Random().nextInt() + "/mypart";
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-11') LOCATION '%s'", relPath);
+
+        exec("ALTER TABLE foo1 PARTITION (b='2010-10-10') SET FILEFORMAT RCFILE");
+
+        exec("ALTER TABLE foo1 PARTITION (b='2010-10-10') SET FILEFORMAT INPUTFORMAT "
+                + "'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT "
+                + "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver "
+                + "'mydriver' outputdriver 'yourdriver'");
+
+        exec("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
+        exec("ALTER TABLE foo1 DROP PARTITION (b='2010-10-11')");
+    }
+
+    @Test
+    public void testAddPartitionFail1() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        whFs.mkdirs(getTablePath("default", "foo1"), perm500);
+        execFail("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
+    }
+
+    @Test
+    public void testAddPartitionFail2() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        String relPath = new Random().nextInt() + "/mypart";
+        Path partPath = new Path(getTablePath("default", "foo1"), relPath);
+        whFs.mkdirs(partPath, perm500);
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
+    }
+
+    @Test
+    public void testDropPartitionFail1() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10')");
+        whFs.mkdirs(getPartPath("b=2010-10-10", "default", "foo1"), perm500);
+        execFail("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
+    }
+
+    @Test
+    public void testDropPartitionFail2() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS TEXTFILE");
+        String relPath = new Random().nextInt() + "/mypart";
+        Path partPath = new Path(getTablePath("default", "foo1"), relPath);
+        whFs.mkdirs(partPath, perm700);
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
+        whFs.mkdirs(partPath, perm500); //revoke write
+        execFail("ALTER TABLE foo1 DROP PARTITION (b='2010-10-10')");
+    }
+
+    @Test
+    public void testAlterTableFail() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (boo STRING) STORED AS TEXTFILE");
+        whFs.mkdirs(getTablePath("default", "foo1"), perm500); //revoke write
+        execFail("ALTER TABLE foo1 SET TBLPROPERTIES ('foo'='bar')");
+        execFail("ALTER TABLE foo1 SET SERDEPROPERTIES ('foo'='bar')");
+        execFail("ALTER TABLE foo1 ADD COLUMNS (foo2 INT)");
+    }
+
+    @Test
+    public void testShowTables() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (boo STRING) STORED AS TEXTFILE");
+        exec("SHOW PARTITIONS foo1");
+
+        whFs.mkdirs(getTablePath("default", "foo1"), perm300); //revoke read
+        execFail("SHOW PARTITIONS foo1");
+    }
+
+    @Test
+    public void testAlterTablePartRename() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
+        Path loc = new Path(whPath, new Random().nextInt() + "/mypart");
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", loc);
+        exec("ALTER TABLE foo1 PARTITION (b='2010-10-16') RENAME TO PARTITION (b='2010-10-17')");
+    }
+
+    @Test
+    public void testAlterTablePartRenameFail() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
+        Path loc = new Path(whPath, new Random().nextInt() + "/mypart");
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", loc);
+        whFs.setPermission(loc, perm500); //revoke w
+        execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') RENAME TO PARTITION (b='2010-10-17')");
+    }
+
+    @Test
+    public void testAlterTablePartRelocate() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16')");
+        Path partPath = new Path(whPath, new Random().nextInt() + "/mypart");
+        exec("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", partPath.makeQualified(whFs));
+    }
+
+    @Test
+    public void testAlterTablePartRelocateFail() throws Exception {
+        exec("CREATE TABLE foo1 (foo INT) PARTITIONED BY (b STRING) STORED AS RCFILE");
+
+        Path oldLoc = new Path(whPath, new Random().nextInt() + "/mypart");
+        Path newLoc = new Path(whPath, new Random().nextInt() + "/mypart2");
+
+        exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-16') LOCATION '%s'", oldLoc);
+        whFs.mkdirs(oldLoc, perm500);
+        execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", newLoc.makeQualified(whFs));
+        whFs.mkdirs(oldLoc, perm700);
+        whFs.mkdirs(newLoc, perm500);
+        execFail("ALTER TABLE foo1 PARTITION (b='2010-10-16') SET LOCATION '%s'", newLoc.makeQualified(whFs));
+    }
+
 }

Modified: incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java (original)
+++ incubator/hcatalog/trunk/storage-handlers/hbase/src/gen-java/org/apache/hcatalog/hbase/snapshot/transaction/thrift/StoreFamilyRevision.java Mon Sep 10 23:28:55 2012
@@ -35,383 +35,382 @@ import java.util.Collections;
 import java.util.BitSet;
 
 public class StoreFamilyRevision implements org.apache.thrift.TBase<StoreFamilyRevision, StoreFamilyRevision._Fields>, java.io.Serializable, Cloneable {
-  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevision");
+    private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("StoreFamilyRevision");
 
-  private static final org.apache.thrift.protocol.TField REVISION_FIELD_DESC = new org.apache.thrift.protocol.TField("revision", org.apache.thrift.protocol.TType.I64, (short)1);
-  private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short)2);
+    private static final org.apache.thrift.protocol.TField REVISION_FIELD_DESC = new org.apache.thrift.protocol.TField("revision", org.apache.thrift.protocol.TType.I64, (short) 1);
+    private static final org.apache.thrift.protocol.TField TIMESTAMP_FIELD_DESC = new org.apache.thrift.protocol.TField("timestamp", org.apache.thrift.protocol.TType.I64, (short) 2);
 
-  public long revision; // required
-  public long timestamp; // required
+    public long revision; // required
+    public long timestamp; // required
 
-  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
-  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
-    REVISION((short)1, "revision"),
-    TIMESTAMP((short)2, "timestamp");
+    /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+    public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+        REVISION((short) 1, "revision"),
+        TIMESTAMP((short) 2, "timestamp");
+
+        private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+        static {
+            for (_Fields field : EnumSet.allOf(_Fields.class)) {
+                byName.put(field.getFieldName(), field);
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, or null if its not found.
+         */
+        public static _Fields findByThriftId(int fieldId) {
+            switch (fieldId) {
+            case 1: // REVISION
+                return REVISION;
+            case 2: // TIMESTAMP
+                return TIMESTAMP;
+            default:
+                return null;
+            }
+        }
+
+        /**
+         * Find the _Fields constant that matches fieldId, throwing an exception
+         * if it is not found.
+         */
+        public static _Fields findByThriftIdOrThrow(int fieldId) {
+            _Fields fields = findByThriftId(fieldId);
+            if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+            return fields;
+        }
+
+        /**
+         * Find the _Fields constant that matches name, or null if its not found.
+         */
+        public static _Fields findByName(String name) {
+            return byName.get(name);
+        }
+
+        private final short _thriftId;
+        private final String _fieldName;
+
+        _Fields(short thriftId, String fieldName) {
+            _thriftId = thriftId;
+            _fieldName = fieldName;
+        }
+
+        public short getThriftFieldId() {
+            return _thriftId;
+        }
+
+        public String getFieldName() {
+            return _fieldName;
+        }
+    }
+
+    // isset id assignments
+    private static final int __REVISION_ISSET_ID = 0;
+    private static final int __TIMESTAMP_ISSET_ID = 1;
+    private BitSet __isset_bit_vector = new BitSet(2);
 
-    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+    public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
 
     static {
-      for (_Fields field : EnumSet.allOf(_Fields.class)) {
-        byName.put(field.getFieldName(), field);
-      }
+        Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+        tmpMap.put(_Fields.REVISION, new org.apache.thrift.meta_data.FieldMetaData("revision", org.apache.thrift.TFieldRequirementType.DEFAULT,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+        tmpMap.put(_Fields.TIMESTAMP, new org.apache.thrift.meta_data.FieldMetaData("timestamp", org.apache.thrift.TFieldRequirementType.DEFAULT,
+            new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+        metaDataMap = Collections.unmodifiableMap(tmpMap);
+        org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StoreFamilyRevision.class, metaDataMap);
+    }
+
+    public StoreFamilyRevision() {
+    }
+
+    public StoreFamilyRevision(
+        long revision,
+        long timestamp) {
+        this();
+        this.revision = revision;
+        setRevisionIsSet(true);
+        this.timestamp = timestamp;
+        setTimestampIsSet(true);
     }
 
     /**
-     * Find the _Fields constant that matches fieldId, or null if its not found.
+     * Performs a deep copy on <i>other</i>.
      */
-    public static _Fields findByThriftId(int fieldId) {
-      switch(fieldId) {
-        case 1: // REVISION
-          return REVISION;
-        case 2: // TIMESTAMP
-          return TIMESTAMP;
-        default:
-          return null;
-      }
+    public StoreFamilyRevision(StoreFamilyRevision other) {
+        __isset_bit_vector.clear();
+        __isset_bit_vector.or(other.__isset_bit_vector);
+        this.revision = other.revision;
+        this.timestamp = other.timestamp;
     }
 
-    /**
-     * Find the _Fields constant that matches fieldId, throwing an exception
-     * if it is not found.
-     */
-    public static _Fields findByThriftIdOrThrow(int fieldId) {
-      _Fields fields = findByThriftId(fieldId);
-      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
-      return fields;
+    public StoreFamilyRevision deepCopy() {
+        return new StoreFamilyRevision(this);
     }
 
-    /**
-     * Find the _Fields constant that matches name, or null if its not found.
-     */
-    public static _Fields findByName(String name) {
-      return byName.get(name);
+    @Override
+    public void clear() {
+        setRevisionIsSet(false);
+        this.revision = 0;
+        setTimestampIsSet(false);
+        this.timestamp = 0;
     }
 
-    private final short _thriftId;
-    private final String _fieldName;
+    public long getRevision() {
+        return this.revision;
+    }
 
-    _Fields(short thriftId, String fieldName) {
-      _thriftId = thriftId;
-      _fieldName = fieldName;
-    }
-
-    public short getThriftFieldId() {
-      return _thriftId;
-    }
-
-    public String getFieldName() {
-      return _fieldName;
-    }
-  }
-
-  // isset id assignments
-  private static final int __REVISION_ISSET_ID = 0;
-  private static final int __TIMESTAMP_ISSET_ID = 1;
-  private BitSet __isset_bit_vector = new BitSet(2);
-
-  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
-  static {
-    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
-    tmpMap.put(_Fields.REVISION, new org.apache.thrift.meta_data.FieldMetaData("revision", org.apache.thrift.TFieldRequirementType.DEFAULT,
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
-    tmpMap.put(_Fields.TIMESTAMP, new org.apache.thrift.meta_data.FieldMetaData("timestamp", org.apache.thrift.TFieldRequirementType.DEFAULT,
-        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
-    metaDataMap = Collections.unmodifiableMap(tmpMap);
-    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StoreFamilyRevision.class, metaDataMap);
-  }
-
-  public StoreFamilyRevision() {
-  }
-
-  public StoreFamilyRevision(
-    long revision,
-    long timestamp)
-  {
-    this();
-    this.revision = revision;
-    setRevisionIsSet(true);
-    this.timestamp = timestamp;
-    setTimestampIsSet(true);
-  }
-
-  /**
-   * Performs a deep copy on <i>other</i>.
-   */
-  public StoreFamilyRevision(StoreFamilyRevision other) {
-    __isset_bit_vector.clear();
-    __isset_bit_vector.or(other.__isset_bit_vector);
-    this.revision = other.revision;
-    this.timestamp = other.timestamp;
-  }
-
-  public StoreFamilyRevision deepCopy() {
-    return new StoreFamilyRevision(this);
-  }
-
-  @Override
-  public void clear() {
-    setRevisionIsSet(false);
-    this.revision = 0;
-    setTimestampIsSet(false);
-    this.timestamp = 0;
-  }
-
-  public long getRevision() {
-    return this.revision;
-  }
-
-  public StoreFamilyRevision setRevision(long revision) {
-    this.revision = revision;
-    setRevisionIsSet(true);
-    return this;
-  }
-
-  public void unsetRevision() {
-    __isset_bit_vector.clear(__REVISION_ISSET_ID);
-  }
-
-  /** Returns true if field revision is set (has been assigned a value) and false otherwise */
-  public boolean isSetRevision() {
-    return __isset_bit_vector.get(__REVISION_ISSET_ID);
-  }
-
-  public void setRevisionIsSet(boolean value) {
-    __isset_bit_vector.set(__REVISION_ISSET_ID, value);
-  }
-
-  public long getTimestamp() {
-    return this.timestamp;
-  }
-
-  public StoreFamilyRevision setTimestamp(long timestamp) {
-    this.timestamp = timestamp;
-    setTimestampIsSet(true);
-    return this;
-  }
-
-  public void unsetTimestamp() {
-    __isset_bit_vector.clear(__TIMESTAMP_ISSET_ID);
-  }
-
-  /** Returns true if field timestamp is set (has been assigned a value) and false otherwise */
-  public boolean isSetTimestamp() {
-    return __isset_bit_vector.get(__TIMESTAMP_ISSET_ID);
-  }
-
-  public void setTimestampIsSet(boolean value) {
-    __isset_bit_vector.set(__TIMESTAMP_ISSET_ID, value);
-  }
-
-  public void setFieldValue(_Fields field, Object value) {
-    switch (field) {
-    case REVISION:
-      if (value == null) {
-        unsetRevision();
-      } else {
-        setRevision((Long)value);
-      }
-      break;
-
-    case TIMESTAMP:
-      if (value == null) {
-        unsetTimestamp();
-      } else {
-        setTimestamp((Long)value);
-      }
-      break;
-
-    }
-  }
-
-  public Object getFieldValue(_Fields field) {
-    switch (field) {
-    case REVISION:
-      return Long.valueOf(getRevision());
-
-    case TIMESTAMP:
-      return Long.valueOf(getTimestamp());
-
-    }
-    throw new IllegalStateException();
-  }
-
-  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
-  public boolean isSet(_Fields field) {
-    if (field == null) {
-      throw new IllegalArgumentException();
-    }
-
-    switch (field) {
-    case REVISION:
-      return isSetRevision();
-    case TIMESTAMP:
-      return isSetTimestamp();
-    }
-    throw new IllegalStateException();
-  }
-
-  @Override
-  public boolean equals(Object that) {
-    if (that == null)
-      return false;
-    if (that instanceof StoreFamilyRevision)
-      return this.equals((StoreFamilyRevision)that);
-    return false;
-  }
-
-  public boolean equals(StoreFamilyRevision that) {
-    if (that == null)
-      return false;
-
-    boolean this_present_revision = true;
-    boolean that_present_revision = true;
-    if (this_present_revision || that_present_revision) {
-      if (!(this_present_revision && that_present_revision))
-        return false;
-      if (this.revision != that.revision)
-        return false;
+    public StoreFamilyRevision setRevision(long revision) {
+        this.revision = revision;
+        setRevisionIsSet(true);
+        return this;
     }
 
-    boolean this_present_timestamp = true;
-    boolean that_present_timestamp = true;
-    if (this_present_timestamp || that_present_timestamp) {
-      if (!(this_present_timestamp && that_present_timestamp))
-        return false;
-      if (this.timestamp != that.timestamp)
-        return false;
+    public void unsetRevision() {
+        __isset_bit_vector.clear(__REVISION_ISSET_ID);
+    }
+
+    /** Returns true if field revision is set (has been assigned a value) and false otherwise */
+    public boolean isSetRevision() {
+        return __isset_bit_vector.get(__REVISION_ISSET_ID);
+    }
+
+    public void setRevisionIsSet(boolean value) {
+        __isset_bit_vector.set(__REVISION_ISSET_ID, value);
+    }
+
+    public long getTimestamp() {
+        return this.timestamp;
+    }
+
+    public StoreFamilyRevision setTimestamp(long timestamp) {
+        this.timestamp = timestamp;
+        setTimestampIsSet(true);
+        return this;
+    }
+
+    public void unsetTimestamp() {
+        __isset_bit_vector.clear(__TIMESTAMP_ISSET_ID);
+    }
+
+    /** Returns true if field timestamp is set (has been assigned a value) and false otherwise */
+    public boolean isSetTimestamp() {
+        return __isset_bit_vector.get(__TIMESTAMP_ISSET_ID);
+    }
+
+    public void setTimestampIsSet(boolean value) {
+        __isset_bit_vector.set(__TIMESTAMP_ISSET_ID, value);
+    }
+
+    public void setFieldValue(_Fields field, Object value) {
+        switch (field) {
+        case REVISION:
+            if (value == null) {
+                unsetRevision();
+            } else {
+                setRevision((Long) value);
+            }
+            break;
+
+        case TIMESTAMP:
+            if (value == null) {
+                unsetTimestamp();
+            } else {
+                setTimestamp((Long) value);
+            }
+            break;
+
+        }
     }
 
-    return true;
-  }
+    public Object getFieldValue(_Fields field) {
+        switch (field) {
+        case REVISION:
+            return Long.valueOf(getRevision());
+
+        case TIMESTAMP:
+            return Long.valueOf(getTimestamp());
+
+        }
+        throw new IllegalStateException();
+    }
+
+    /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+    public boolean isSet(_Fields field) {
+        if (field == null) {
+            throw new IllegalArgumentException();
+        }
+
+        switch (field) {
+        case REVISION:
+            return isSetRevision();
+        case TIMESTAMP:
+            return isSetTimestamp();
+        }
+        throw new IllegalStateException();
+    }
+
+    @Override
+    public boolean equals(Object that) {
+        if (that == null)
+            return false;
+        if (that instanceof StoreFamilyRevision)
+            return this.equals((StoreFamilyRevision) that);
+        return false;
+    }
 
-  @Override
-  public int hashCode() {
-    return 0;
-  }
-
-  public int compareTo(StoreFamilyRevision other) {
-    if (!getClass().equals(other.getClass())) {
-      return getClass().getName().compareTo(other.getClass().getName());
-    }
-
-    int lastComparison = 0;
-    StoreFamilyRevision typedOther = (StoreFamilyRevision)other;
-
-    lastComparison = Boolean.valueOf(isSetRevision()).compareTo(typedOther.isSetRevision());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetRevision()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.revision, typedOther.revision);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    lastComparison = Boolean.valueOf(isSetTimestamp()).compareTo(typedOther.isSetTimestamp());
-    if (lastComparison != 0) {
-      return lastComparison;
-    }
-    if (isSetTimestamp()) {
-      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timestamp, typedOther.timestamp);
-      if (lastComparison != 0) {
-        return lastComparison;
-      }
-    }
-    return 0;
-  }
-
-  public _Fields fieldForId(int fieldId) {
-    return _Fields.findByThriftId(fieldId);
-  }
-
-  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
-    org.apache.thrift.protocol.TField field;
-    iprot.readStructBegin();
-    while (true)
-    {
-      field = iprot.readFieldBegin();
-      if (field.type == org.apache.thrift.protocol.TType.STOP) {
-        break;
-      }
-      switch (field.id) {
-        case 1: // REVISION
-          if (field.type == org.apache.thrift.protocol.TType.I64) {
-            this.revision = iprot.readI64();
-            setRevisionIsSet(true);
-          } else {
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        case 2: // TIMESTAMP
-          if (field.type == org.apache.thrift.protocol.TType.I64) {
-            this.timestamp = iprot.readI64();
-            setTimestampIsSet(true);
-          } else {
-            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-          }
-          break;
-        default:
-          org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
-      }
-      iprot.readFieldEnd();
-    }
-    iprot.readStructEnd();
-
-    // check for required fields of primitive type, which can't be checked in the validate method
-    validate();
-  }
-
-  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
-    validate();
-
-    oprot.writeStructBegin(STRUCT_DESC);
-    oprot.writeFieldBegin(REVISION_FIELD_DESC);
-    oprot.writeI64(this.revision);
-    oprot.writeFieldEnd();
-    oprot.writeFieldBegin(TIMESTAMP_FIELD_DESC);
-    oprot.writeI64(this.timestamp);
-    oprot.writeFieldEnd();
-    oprot.writeFieldStop();
-    oprot.writeStructEnd();
-  }
-
-  @Override
-  public String toString() {
-    StringBuilder sb = new StringBuilder("StoreFamilyRevision(");
-    boolean first = true;
-
-    sb.append("revision:");
-    sb.append(this.revision);
-    first = false;
-    if (!first) sb.append(", ");
-    sb.append("timestamp:");
-    sb.append(this.timestamp);
-    first = false;
-    sb.append(")");
-    return sb.toString();
-  }
-
-  public void validate() throws org.apache.thrift.TException {
-    // check for required fields
-  }
-
-  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
-    try {
-      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
-    }
-  }
-
-  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
-    try {
-      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
-      __isset_bit_vector = new BitSet(1);
-      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
-    } catch (org.apache.thrift.TException te) {
-      throw new java.io.IOException(te);
+    /**
+     * Field-wise equality against another {@code StoreFamilyRevision}:
+     * true iff both {@code revision} and {@code timestamp} match.
+     * The "present" flags are constant {@code true} because both fields are
+     * primitive i64s and therefore always present (generated Thrift pattern);
+     * presumably the Object#equals override delegating here sits above this
+     * hunk — TODO confirm against the full file.
+     *
+     * @param that candidate for comparison; {@code null} yields {@code false}
+     * @return true when both structs carry identical field values
+     */
+    public boolean equals(StoreFamilyRevision that) {
+        if (that == null)
+            return false;
+
+        boolean this_present_revision = true;
+        boolean that_present_revision = true;
+        if (this_present_revision || that_present_revision) {
+            if (!(this_present_revision && that_present_revision))
+                return false;
+            if (this.revision != that.revision)
+                return false;
+        }
+
+        boolean this_present_timestamp = true;
+        boolean that_present_timestamp = true;
+        if (this_present_timestamp || that_present_timestamp) {
+            if (!(this_present_timestamp && that_present_timestamp))
+                return false;
+            if (this.timestamp != that.timestamp)
+                return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Returns a constant {@code 0} for every instance (behavior of this
+     * Thrift generator version). This satisfies the equals/hashCode contract
+     * (equal objects get equal hashes), but it maps ALL instances to one
+     * bucket.
+     * NOTE(review): using this type as a HashMap/HashSet key degrades those
+     * collections to linear scans; regenerating with a newer Thrift compiler
+     * produces a real field-based hash.
+     */
+    @Override
+    public int hashCode() {
+        return 0;
+    }
+
+    /**
+     * Total ordering consistent with {@link #equals(StoreFamilyRevision)}:
+     * compares by {@code revision} first, then {@code timestamp}. For each
+     * field, an unset value sorts before a set one (Boolean false < true),
+     * then set values compare numerically via TBaseHelper.
+     * Instances of a different runtime class order by class name.
+     *
+     * @param other struct to compare against; a {@code null} argument throws
+     *              NullPointerException (generated code does not guard it)
+     * @return negative, zero, or positive per the Comparable contract
+     */
+    public int compareTo(StoreFamilyRevision other) {
+        if (!getClass().equals(other.getClass())) {
+            return getClass().getName().compareTo(other.getClass().getName());
+        }
+
+        int lastComparison = 0;
+        // Cast is safe: the getClass() check above rejects other subtypes.
+        StoreFamilyRevision typedOther = (StoreFamilyRevision) other;
+
+        lastComparison = Boolean.valueOf(isSetRevision()).compareTo(typedOther.isSetRevision());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetRevision()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.revision, typedOther.revision);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
+        }
+        lastComparison = Boolean.valueOf(isSetTimestamp()).compareTo(typedOther.isSetTimestamp());
+        if (lastComparison != 0) {
+            return lastComparison;
+        }
+        if (isSetTimestamp()) {
+            lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.timestamp, typedOther.timestamp);
+            if (lastComparison != 0) {
+                return lastComparison;
+            }
+        }
+        return 0;
+    }
+
+    /**
+     * Maps a Thrift wire field id (1 = REVISION, 2 = TIMESTAMP, per the
+     * read() switch below) to its {@code _Fields} enum constant.
+     *
+     * @param fieldId numeric field id from the IDL
+     * @return the matching constant, as resolved by _Fields.findByThriftId
+     */
+    public _Fields fieldForId(int fieldId) {
+        return _Fields.findByThriftId(fieldId);
+    }
+
+    /**
+     * Deserializes this struct from the given protocol, reading fields until
+     * the STOP marker. Fields with an unexpected id or wire type are skipped
+     * rather than failing, which keeps old readers forward-compatible with
+     * newer schemas. Ends by calling {@link #validate()}.
+     *
+     * @param iprot input protocol positioned at the start of the struct
+     * @throws org.apache.thrift.TException on protocol-level read errors
+     */
+    public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+        org.apache.thrift.protocol.TField field;
+        iprot.readStructBegin();
+        while (true) {
+            field = iprot.readFieldBegin();
+            if (field.type == org.apache.thrift.protocol.TType.STOP) {
+                break;
+            }
+            switch (field.id) {
+            case 1: // REVISION
+                if (field.type == org.apache.thrift.protocol.TType.I64) {
+                    this.revision = iprot.readI64();
+                    setRevisionIsSet(true);
+                } else {
+                    // Wrong wire type for this id: skip payload, stay in sync.
+                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+                }
+                break;
+            case 2: // TIMESTAMP
+                if (field.type == org.apache.thrift.protocol.TType.I64) {
+                    this.timestamp = iprot.readI64();
+                    setTimestampIsSet(true);
+                } else {
+                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+                }
+                break;
+            default:
+                // Unknown field id (newer schema): skip for forward compatibility.
+                org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+            }
+            iprot.readFieldEnd();
+        }
+        iprot.readStructEnd();
+
+        // check for required fields of primitive type, which can't be checked in the validate method
+        validate();
+    }
+
+    /**
+     * Serializes this struct to the given protocol after {@link #validate()}.
+     * Both i64 fields are written unconditionally — primitives are always
+     * "present" — followed by the field-stop marker.
+     *
+     * @param oprot output protocol to write into
+     * @throws org.apache.thrift.TException on protocol-level write errors
+     */
+    public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+        validate();
+
+        oprot.writeStructBegin(STRUCT_DESC);
+        oprot.writeFieldBegin(REVISION_FIELD_DESC);
+        oprot.writeI64(this.revision);
+        oprot.writeFieldEnd();
+        oprot.writeFieldBegin(TIMESTAMP_FIELD_DESC);
+        oprot.writeI64(this.timestamp);
+        oprot.writeFieldEnd();
+        oprot.writeFieldStop();
+        oprot.writeStructEnd();
+    }
+
+    /**
+     * Renders the struct as {@code StoreFamilyRevision(revision:R, timestamp:T)}.
+     * NOTE(review): the {@code first} flag is generated boilerplate — it is
+     * set to false before the only check that reads it, so the ", " separator
+     * branch always executes. Harmless, but dead logic.
+     */
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder("StoreFamilyRevision(");
+        boolean first = true;
+
+        sb.append("revision:");
+        sb.append(this.revision);
+        first = false;
+        if (!first) sb.append(", ");
+        sb.append("timestamp:");
+        sb.append(this.timestamp);
+        first = false;
+        sb.append(")");
+        return sb.toString();
+    }
+
+    /**
+     * Validation hook called by read() and write(). This struct declares no
+     * required non-primitive fields, so there is nothing to check and the
+     * body is intentionally empty.
+     *
+     * @throws org.apache.thrift.TException never, in this generated version
+     */
+    public void validate() throws org.apache.thrift.TException {
+        // check for required fields
+    }
+
+    /**
+     * Java-serialization hook: encodes this struct with Thrift's compact
+     * protocol over the object stream, wrapping any TException as
+     * IOException (preserving it as the cause) to satisfy the
+     * writeObject contract.
+     */
+    private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+        try {
+            write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
+    }
+
+    /**
+     * Java-serialization hook: decodes this struct with Thrift's compact
+     * protocol. The isset BitSet is re-created first because Java
+     * deserialization bypasses the default constructor, leaving the field
+     * null; TException is wrapped as IOException with the cause preserved.
+     */
+    private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+        try {
+            // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+            __isset_bit_vector = new BitSet(1);
+            read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+        } catch (org.apache.thrift.TException te) {
+            throw new java.io.IOException(te);
+        }
+    }
-  }
 
 }
 



Mime
View raw message