Subject: svn commit: r1664220 [1/2] - in /hive/branches/hbase-metastore: itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/ metastore/src/java/org/apache/hadoop/hive/metastore/hbase/ metastore/src/test/org/apache/hadoop/hive/metastore/hbase/
Date: Thu, 05 Mar 2015 03:46:29 -0000
To: commits@hive.apache.org
From: gates@apache.org

Author: gates
Date: Thu Mar  5 03:46:28 2015
New Revision: 1664220

URL: http://svn.apache.org/r1664220
Log:
HIVE-9677 Implement privileges call in HBaseStore (Alan Gates)

Added:
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java
    hive/branches/hbase-metastore/metastore/src/test/org/apache/hadoop/hive/metastore/hbase/TestHBaseUtils.java
Modified:
    hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseStore.java
    hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseUtils.java
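For orientation before the diffs: the new privilege calls follow the existing RawStore patterns, and the integration tests below drive them end to end. A minimal sketch of the grant-then-read-back flow, using only API classes and store methods that appear in this patch (the PrivilegeSketch wrapper and the "mydb"/"fred"/"admin" names are illustrative, not part of the commit):

    import java.util.Arrays;
    import org.apache.hadoop.hive.metastore.RawStore;
    import org.apache.hadoop.hive.metastore.api.*;

    public class PrivilegeSketch {
      // Grant "select" on database "mydb" to user "fred", then read it back.
      // "store" is a configured store, as set up in the tests below.
      static PrincipalPrivilegeSet grantAndFetch(RawStore store) throws Exception {
        int now = (int) (System.currentTimeMillis() / 1000);
        HiveObjectRef ref = new HiveObjectRef(HiveObjectType.DATABASE, "mydb", null, null, null);
        PrivilegeGrantInfo info =
            new PrivilegeGrantInfo("select", now, "admin", PrincipalType.USER, false);
        HiveObjectPrivilege priv = new HiveObjectPrivilege(ref, "fred", PrincipalType.USER, info);
        store.grantPrivileges(new PrivilegeBag(Arrays.asList(priv)));
        return store.getDBPrivilegeSet("mydb", "fred", null);
      }
    }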
Modified: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java?rev=1664220&r1=1664219&r2=1664220&view=diff
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java (original)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestHBaseStoreIntegration.java Thu Mar  5 03:46:28 2015
@@ -24,6 +24,7 @@ import org.apache.hadoop.hbase.HBaseTest
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
 import org.apache.hadoop.hive.metastore.api.BinaryColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.BooleanColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
@@ -35,18 +36,29 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.DecimalColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.DoubleColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.LongColumnStatsData;
+import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.StringColumnStatsData;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.model.MRoleMap;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -74,6 +86,8 @@ public class TestHBaseStoreIntegration {
   private static HTableInterface partTable;
   private static HTableInterface dbTable;
   private static HTableInterface roleTable;
+  private static HTableInterface globalPrivsTable;
+  private static HTableInterface principalRoleMapTable;
   private static Map<String, String> emptyParameters = new HashMap<String, String>();
 
   @Rule public ExpectedException thrown = ExpectedException.none();
@@ -96,6 +110,12 @@ public class TestHBaseStoreIntegration {
         HBaseReadWrite.CATALOG_CF);
     roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
         HBaseReadWrite.CATALOG_CF);
+    globalPrivsTable =
+        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
+    principalRoleMapTable =
+        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
   }
 
   @AfterClass
@@ -111,6 +131,8 @@ public class TestHBaseStoreIntegration {
     Mockito.when(hconn.getTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
     Mockito.when(hconn.getTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
     Mockito.when(hconn.getTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
     conf = new HiveConf();
     // Turn off caching, as we want to test actual interaction with HBase
     conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
@@ -432,58 +454,6 @@ public class TestHBaseStoreIntegration {
     }
   }
 
-  // TODO - Fix this and the next test.  They depend on test execution order and are bogus.
-  @Test
-  public void createManyPartitions() throws Exception {
-    String dbName = "default";
-    String tableName = "manyParts";
-    int startTime = (int)(System.currentTimeMillis() / 1000);
-    List<FieldSchema> cols = new ArrayList<FieldSchema>();
-    cols.add(new FieldSchema("col1", "int", "nocomment"));
-    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
-    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
-        serde, null, null, emptyParameters);
-    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
-    partCols.add(new FieldSchema("pc", "string", ""));
-    Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
-        emptyParameters, null, null, null);
-    store.createTable(table);
-
-    List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
-    for (String val : partVals) {
-      List<String> vals = new ArrayList<String>();
-      vals.add(val);
-      StorageDescriptor psd = new StorageDescriptor(sd);
-      psd.setLocation("file:/tmp/pc=" + val);
-      Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
-          emptyParameters);
-      store.addPartition(part);
-
-      Partition p = store.getPartition(dbName, tableName, vals);
-      Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
-    }
-
-    Assert.assertEquals(2, HBaseReadWrite.getInstance(conf).countStorageDescriptor());
-
-  }
-
-  @Test
-  public void createDifferentPartition() throws Exception {
-    int startTime = (int)(System.currentTimeMillis() / 1000);
-    Map<String, String> emptyParameters = new HashMap<String, String>();
-    List<FieldSchema> cols = new ArrayList<FieldSchema>();
-    cols.add(new FieldSchema("col1", "int", "nocomment"));
-    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
-    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input2", "output", false, 0,
-        serde, null, null, emptyParameters);
-    Table table = new Table("differenttable", "default", "me", startTime, startTime, 0, sd, null,
-        emptyParameters, null, null, null);
-    store.createTable(table);
-
-    Assert.assertEquals(3, HBaseReadWrite.getInstance(conf).countStorageDescriptor());
-
-  }
-
   @Test
   public void getPartitions() throws Exception {
     String dbName = "default";
@@ -685,6 +655,437 @@ public class TestHBaseStoreIntegration {
   }
 
   @Test
+  public void grantRevokeRoles() throws Exception {
+    int now = (int)(System.currentTimeMillis()/1000);
+    String roleName1 = "role1";
+    store.addRole(roleName1, "me");
+    String roleName2 = "role2";
+    store.addRole(roleName2, "me");
+
+    Role role1 = store.getRole(roleName1);
+    Role role2 = store.getRole(roleName2);
+
+    store.grantRole(role1, "fred", PrincipalType.USER, "bob", PrincipalType.USER, false);
+    store.grantRole(role2, roleName1, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
+    store.grantRole(role2, "fred", PrincipalType.USER, "admin", PrincipalType.ROLE, false);
+
+    List<MRoleMap> maps = store.listRoles("fred", PrincipalType.USER);
+    Assert.assertEquals(3, maps.size());
+    boolean sawRole1 = false, sawRole2 = false, sawPublic = false;
+    for (MRoleMap map : maps) {
+      if (map.getRole().getRoleName().equals(roleName1)) {
+        sawRole1 = true;
+        Assert.assertEquals("fred", map.getPrincipalName());
+        Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType());
+        Assert.assertTrue(map.getAddTime() >= now);
+        Assert.assertEquals("bob", map.getGrantor());
+        Assert.assertEquals(PrincipalType.USER.toString(), map.getGrantorType());
+        Assert.assertFalse(map.getGrantOption());
+      } else if (map.getRole().getRoleName().equals(roleName2)) {
+        sawRole2 = true;
+        Assert.assertEquals("fred", map.getPrincipalName());
+        Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType());
+        LOG.debug("now " + now + " add time " + map.getAddTime());
+        Assert.assertTrue(map.getAddTime() >= now);
+        Assert.assertEquals("admin", map.getGrantor());
+        Assert.assertEquals(PrincipalType.ROLE.toString(), map.getGrantorType());
+        Assert.assertFalse(map.getGrantOption());
+      } else if (map.getRole().getRoleName().equals(HiveMetaStore.PUBLIC)) {
+        sawPublic = true;
+        Assert.assertEquals("fred", map.getPrincipalName());
+        Assert.assertEquals(PrincipalType.USER.toString(), map.getPrincipalType());
+        Assert.assertFalse(map.getGrantOption());
+      } else {
+        Assert.fail("Unknown role name " + map.getRole().getRoleName());
+      }
+    }
+    Assert.assertTrue(sawRole1 && sawRole2 && sawPublic);
+
+    maps = store.listRoles("fred", PrincipalType.ROLE);
+    Assert.assertEquals(0, maps.size());
+
+    maps = store.listRoles(roleName1, PrincipalType.ROLE);
+    Assert.assertEquals(1, maps.size());
+    MRoleMap map = maps.get(0);
+    Assert.assertEquals(roleName1, map.getPrincipalName());
+    Assert.assertEquals(PrincipalType.ROLE.toString(), map.getPrincipalType());
+    Assert.assertEquals(roleName2, map.getRole().getRoleName());
+    Assert.assertTrue(map.getAddTime() <= now);
+    Assert.assertEquals("admin", map.getGrantor());
+    Assert.assertEquals(PrincipalType.ROLE.toString(), map.getGrantorType());
+    Assert.assertTrue(map.getGrantOption());
+
+    // Test listing all members in a role
+    maps = store.listRoleMembers(roleName1);
+    Assert.assertEquals(1, maps.size());
+    Assert.assertEquals("fred", maps.get(0).getPrincipalName());
+    Assert.assertEquals(PrincipalType.USER.toString(), maps.get(0).getPrincipalType());
+    Assert.assertTrue(maps.get(0).getAddTime() >= now);
+    Assert.assertEquals("bob", maps.get(0).getGrantor());
+    Assert.assertEquals(PrincipalType.USER.toString(), maps.get(0).getGrantorType());
+    Assert.assertFalse(maps.get(0).getGrantOption());
+
+    maps = store.listRoleMembers(roleName2);
+    Assert.assertEquals(2, maps.size());
+    boolean sawFred = false;
+    sawRole1 = false;
+    for (MRoleMap m : maps) {
+      if ("fred".equals(m.getPrincipalName())) sawFred = true;
+      else if (roleName1.equals(m.getPrincipalName())) sawRole1 = true;
+      else Assert.fail("Unexpected principal " + m.getPrincipalName());
+    }
+    Assert.assertTrue(sawFred && sawRole1);
+
+    // Revoke a role with grant option, make sure it just goes to no grant option
+    store.revokeRole(role2, roleName1, PrincipalType.ROLE, true);
+    maps = store.listRoles(roleName1, PrincipalType.ROLE);
+    Assert.assertEquals(1, maps.size());
+    Assert.assertEquals(roleName2, maps.get(0).getRole().getRoleName());
+    Assert.assertFalse(maps.get(0).getGrantOption());
+
+    // Drop a role, make sure it is properly removed from the map
+    store.removeRole(roleName1);
+    maps = store.listRoles("fred", PrincipalType.USER);
+    Assert.assertEquals(2, maps.size());
+    sawRole2 = sawPublic = false;
+    for (MRoleMap m : maps) {
+      if (m.getRole().getRoleName().equals(roleName2)) sawRole2 = true;
+      else if (m.getRole().getRoleName().equals(HiveMetaStore.PUBLIC)) sawPublic = true;
+      else Assert.fail("Unknown role " + m.getRole().getRoleName());
+    }
+    Assert.assertTrue(sawRole2 && sawPublic);
+    maps = store.listRoles(roleName1, PrincipalType.ROLE);
+    Assert.assertEquals(0, maps.size());
+
+    // Revoke a role without grant option, make sure it goes away
+    store.revokeRole(role2, "fred", PrincipalType.USER, false);
+    maps = store.listRoles("fred", PrincipalType.USER);
+    Assert.assertEquals(1, maps.size());
+    Assert.assertEquals(HiveMetaStore.PUBLIC, maps.get(0).getRole().getRoleName());
+  }
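The pair of revokeRole calls in this test pins down what the final boolean argument does. In isolation (same RawStore signature as used above; a sketch, not committed code):

    // revokeRole(Role role, String principal, PrincipalType type, boolean grantOption)
    store.revokeRole(role2, roleName1, PrincipalType.ROLE, true);   // true: keep the
                                                                    // membership, only drop
                                                                    // WITH GRANT OPTION
    store.revokeRole(role2, "fred", PrincipalType.USER, false);     // false: drop the
                                                                    // membership itself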
+
+  @Test
+  public void userToRoleMap() throws Exception {
+    String roleName1 = "utrm1";
+    store.addRole(roleName1, "me");
+    String roleName2 = "utrm2";
+    store.addRole(roleName2, "me");
+    String user1 = "wilma";
+    String user2 = "betty";
+
+    Role role1 = store.getRole(roleName1);
+    Role role2 = store.getRole(roleName2);
+
+    store.grantRole(role1, user1, PrincipalType.USER, "bob", PrincipalType.USER, false);
+    store.grantRole(role1, roleName2, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
+
+    List<String> roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1);
+    Assert.assertEquals(2, roles.size());
+    String[] roleNames = roles.toArray(new String[roles.size()]);
+    Arrays.sort(roleNames);
+    Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames);
+
+    store.grantRole(role2, user1, PrincipalType.USER, "admin", PrincipalType.ROLE, false);
+    store.grantRole(role1, user2, PrincipalType.USER, "bob", PrincipalType.USER, false);
+
+    roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2);
+    Assert.assertEquals(2, roles.size());
+    roleNames = roles.toArray(new String[roles.size()]);
+    Arrays.sort(roleNames);
+    Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames);
+
+    store.revokeRole(role1, roleName2, PrincipalType.ROLE, false);
+
+    // user1 should still have both roles since she was granted into each specifically.  user2
+    // should only have role1 now since role2 was revoked from role1.
+    roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1);
+    Assert.assertEquals(2, roles.size());
+    roleNames = roles.toArray(new String[roles.size()]);
+    Arrays.sort(roleNames);
+    Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames);
+
+    roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2);
+    Assert.assertEquals(1, roles.size());
+    Assert.assertEquals(roleName1, roles.get(0));
+  }
+
+  @Test
+  public void userToRoleMapOnDrop() throws Exception {
+    String roleName1 = "utrmod1";
+    store.addRole(roleName1, "me");
+    String roleName2 = "utrmod2";
+    store.addRole(roleName2, "me");
+    String user1 = "pebbles";
+    String user2 = "bam-bam";
+
+    Role role1 = store.getRole(roleName1);
+    Role role2 = store.getRole(roleName2);
+
+    store.grantRole(role1, user1, PrincipalType.USER, "bob", PrincipalType.USER, false);
+    store.grantRole(role1, roleName2, PrincipalType.ROLE, "admin", PrincipalType.ROLE, true);
+    store.grantRole(role1, user2, PrincipalType.USER, "bob", PrincipalType.USER, false);
+
+    List<String> roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2);
+    Assert.assertEquals(2, roles.size());
+    String[] roleNames = roles.toArray(new String[roles.size()]);
+    Arrays.sort(roleNames);
+    Assert.assertArrayEquals(new String[]{roleName1, roleName2}, roleNames);
+
+    store.removeRole(roleName2);
+
+    roles = HBaseReadWrite.getInstance(conf).getUserRoles(user1);
+    Assert.assertEquals(1, roles.size());
+    Assert.assertEquals(roleName1, roles.get(0));
+
+    roles = HBaseReadWrite.getInstance(conf).getUserRoles(user2);
+    Assert.assertEquals(1, roles.size());
+    Assert.assertEquals(roleName1, roles.get(0));
+  }
"wiley"}); + } + + @Test + public void grantRevokeDbPrivileges() throws Exception { + String dbName = "grdbp_db"; + try { + Database db = new Database(dbName, "no description", "file:///tmp", emptyParameters); + store.createDatabase(db); + doGrantRevoke(HiveObjectType.DATABASE, dbName, null, + new String[] {"grdbp_role1", "grdbp_role2"}, + new String[] {"fred", "barney", "wilma", "betty"}); + } finally { + store.dropDatabase(dbName); + } + } + + @Test + public void grantRevokeTablePrivileges() throws Exception { + String dbName = "grtp_db"; + String tableName = "grtp_table"; + try { + Database db = new Database(dbName, "no description", "file:///tmp", emptyParameters); + store.createDatabase(db); + int startTime = (int)(System.currentTimeMillis() / 1000); + List cols = new ArrayList(); + cols.add(new FieldSchema("col1", "int", "nocomment")); + SerDeInfo serde = new SerDeInfo("serde", "seriallib", null); + StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0, + serde, null, null, emptyParameters); + Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, null, + emptyParameters, null, null, null); + store.createTable(table); + doGrantRevoke(HiveObjectType.TABLE, dbName, tableName, + new String[] {"grtp_role1", "grtp_role2"}, + new String[] {"batman", "robin", "superman", "wonderwoman"}); + + } finally { + if (store.getTable(dbName, tableName) != null) store.dropTable(dbName, tableName); + store.dropDatabase(dbName); + } + } + + private void doGrantRevoke(HiveObjectType objectType, String dbName, String tableName, + String[] roleNames, String[] userNames) + throws Exception { + store.addRole(roleNames[0], "me"); + store.addRole(roleNames[1], "me"); + int now = (int)(System.currentTimeMillis() / 1000); + + Role role1 = store.getRole(roleNames[0]); + Role role2 = store.getRole(roleNames[1]); + store.grantRole(role1, userNames[0], PrincipalType.USER, "bob", PrincipalType.USER, false); + store.grantRole(role1, roleNames[1], PrincipalType.ROLE, "admin", PrincipalType.ROLE, true); + store.grantRole(role2, userNames[1], PrincipalType.USER, "bob", PrincipalType.USER, false); + + List privileges = new ArrayList(); + HiveObjectRef hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + PrivilegeGrantInfo grantInfo = + new PrivilegeGrantInfo("read", now, "me", PrincipalType.USER, false); + HiveObjectPrivilege hop = new HiveObjectPrivilege(hiveObjRef, userNames[0], PrincipalType.USER, + grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("exec", now, "me", PrincipalType.USER, false); + hop = new HiveObjectPrivilege(hiveObjRef, roleNames[1], PrincipalType.ROLE, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("create", now, "me", PrincipalType.USER, true); + hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo); + privileges.add(hop); + + hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null); + grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true); + hop = new 
+    hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo);
+    privileges.add(hop);
+
+    PrivilegeBag pBag = new PrivilegeBag(privileges);
+    store.grantPrivileges(pBag);
+
+    PrincipalPrivilegeSet pps = getPPS(objectType, dbName, tableName, userNames[0]);
+
+    Assert.assertEquals(1, pps.getUserPrivilegesSize());
+    Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[0]).size());
+    grantInfo = pps.getUserPrivileges().get(userNames[0]).get(0);
+    Assert.assertEquals("read", grantInfo.getPrivilege());
+    Assert.assertTrue(now <= grantInfo.getCreateTime());
+    Assert.assertEquals("me", grantInfo.getGrantor());
+    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
+    Assert.assertFalse(grantInfo.isGrantOption());
+
+    Assert.assertEquals(2, pps.getRolePrivilegesSize());
+    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
+    grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0);
+    Assert.assertEquals("write", grantInfo.getPrivilege());
+    Assert.assertTrue(now <= grantInfo.getCreateTime());
+    Assert.assertEquals("me", grantInfo.getGrantor());
+    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
+    Assert.assertTrue(grantInfo.isGrantOption());
+
+    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size());
+    grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0);
+    Assert.assertEquals("exec", grantInfo.getPrivilege());
+    Assert.assertTrue(now <= grantInfo.getCreateTime());
+    Assert.assertEquals("me", grantInfo.getGrantor());
+    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
+    Assert.assertFalse(grantInfo.isGrantOption());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[1]);
+
+    Assert.assertEquals(0, pps.getUserPrivilegesSize());
+
+    Assert.assertEquals(1, pps.getRolePrivilegesSize());
+    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[1]).size());
+    grantInfo = pps.getRolePrivileges().get(roleNames[1]).get(0);
+    Assert.assertEquals("exec", grantInfo.getPrivilege());
+    Assert.assertTrue(now <= grantInfo.getCreateTime());
+    Assert.assertEquals("me", grantInfo.getGrantor());
+    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
+    Assert.assertFalse(grantInfo.isGrantOption());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[2]);
+
+    Assert.assertEquals(1, pps.getUserPrivilegesSize());
+    Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size());
+    Assert.assertEquals(0, pps.getRolePrivilegesSize());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[3]);
+    Assert.assertEquals(0, pps.getUserPrivilegesSize());
+    Assert.assertEquals(0, pps.getRolePrivilegesSize());
+
+    // Test that removing role removes the role grants
+    store.removeRole(roleNames[1]);
+    checkRoleRemovedFromAllPrivileges(objectType, dbName, tableName, roleNames[1]);
+    pps = getPPS(objectType, dbName, tableName, userNames[0]);
+
+    Assert.assertEquals(1, pps.getRolePrivilegesSize());
+    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[1]);
+
+    Assert.assertEquals(0, pps.getRolePrivilegesSize());
+
+    // Test that revoking with grant option = true just removes grant option
+    privileges.clear();
+    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
+    grantInfo = new PrivilegeGrantInfo("write", now, "me", PrincipalType.USER, true);
+    hop = new HiveObjectPrivilege(hiveObjRef, roleNames[0], PrincipalType.ROLE, grantInfo);
+    privileges.add(hop);
+
+    hiveObjRef = new HiveObjectRef(objectType, dbName, tableName, null, null);
+    grantInfo = new PrivilegeGrantInfo("create2", now, "me", PrincipalType.USER, true);
+    hop = new HiveObjectPrivilege(hiveObjRef, userNames[2], PrincipalType.USER, grantInfo);
+    privileges.add(hop);
+
+    pBag = new PrivilegeBag(privileges);
+    store.revokePrivileges(pBag, true);
+    pps = getPPS(objectType, dbName, tableName, userNames[0]);
+
+    Assert.assertEquals(1, pps.getRolePrivilegesSize());
+    Assert.assertEquals(1, pps.getRolePrivileges().get(roleNames[0]).size());
+    grantInfo = pps.getRolePrivileges().get(roleNames[0]).get(0);
+    Assert.assertEquals("write", grantInfo.getPrivilege());
+    Assert.assertTrue(now <= grantInfo.getCreateTime());
+    Assert.assertEquals("me", grantInfo.getGrantor());
+    Assert.assertEquals(PrincipalType.USER, grantInfo.getGrantorType());
+    Assert.assertFalse(grantInfo.isGrantOption());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[2]);
+
+    Assert.assertEquals(1, pps.getUserPrivilegesSize());
+    Assert.assertEquals(2, pps.getUserPrivileges().get(userNames[2]).size());
+    for (PrivilegeGrantInfo pgi : pps.getUserPrivileges().get(userNames[2])) {
+      if (pgi.getPrivilege().equals("create")) Assert.assertTrue(pgi.isGrantOption());
+      else if (pgi.getPrivilege().equals("create2")) Assert.assertFalse(pgi.isGrantOption());
+      else Assert.fail("huh?");
+    }
+
+    // Test revoking revokes
+    store.revokePrivileges(pBag, false);
+
+    pps = getPPS(objectType, dbName, tableName, userNames[0]);
+
+    Assert.assertEquals(1, pps.getUserPrivilegesSize());
+    Assert.assertEquals(1, pps.getRolePrivilegesSize());
+    Assert.assertEquals(0, pps.getRolePrivileges().get(roleNames[0]).size());
+
+    pps = getPPS(objectType, dbName, tableName, userNames[2]);
+    Assert.assertEquals(1, pps.getUserPrivilegesSize());
+    Assert.assertEquals(1, pps.getUserPrivileges().get(userNames[2]).size());
+    Assert.assertEquals("create", pps.getUserPrivileges().get(userNames[2]).get(0).getPrivilege());
+    Assert.assertEquals(0, pps.getRolePrivilegesSize());
+  }
+
+  private PrincipalPrivilegeSet getPPS(HiveObjectType objectType, String dbName, String tableName,
+      String userName)
+      throws InvalidObjectException, MetaException {
+    switch (objectType) {
+      case GLOBAL: return store.getUserPrivilegeSet(userName, null);
+      case DATABASE: return store.getDBPrivilegeSet(dbName, userName, null);
+      case TABLE: return store.getTablePrivilegeSet(dbName, tableName, userName, null);
+      default: throw new RuntimeException("huh?");
+    }
+  }
+
+  private void checkRoleRemovedFromAllPrivileges(HiveObjectType objectType, String dbName,
+      String tableName, String roleName)
+      throws IOException, NoSuchObjectException, MetaException {
+    List<PrivilegeGrantInfo> pgi = null;
+    switch (objectType) {
+      case GLOBAL:
+        pgi = HBaseReadWrite.getInstance(conf).getGlobalPrivs().getRolePrivileges().get(roleName);
+        break;
+
+      case DATABASE:
+        pgi = store.getDatabase(dbName).getPrivileges().getRolePrivileges().get(roleName);
+        break;
+
+      case TABLE:
+        pgi = store.getTable(dbName, tableName).getPrivileges().getRolePrivileges().get(roleName);
+        break;
+
+      default:
+        Assert.fail();
+    }
+
+    Assert.assertNull("Expected null for role " + roleName + " for type " + objectType.toString()
+        + " with db " + dbName + " and table " + tableName, pgi);
+  }
+
+  @Test
   public void tableStatistics() throws Exception {
     long now = System.currentTimeMillis();
     String dbname = "default";
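The PrincipalPrivilegeSet returned by getPPS keys privileges by principal name on both the user and role sides. A small illustrative helper for walking the user side (not part of the commit; getUserPrivileges is the thrift-generated accessor used by the assertions above):

    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
    import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;

    class PrivilegeSetSketch {
      // Each principal name maps to the list of grants it holds.
      static void dump(PrincipalPrivilegeSet pps) {
        for (Map.Entry<String, List<PrivilegeGrantInfo>> e : pps.getUserPrivileges().entrySet()) {
          for (PrivilegeGrantInfo g : e.getValue()) {
            System.out.println(e.getKey() + " holds " + g.getPrivilege()
                + (g.isGrantOption() ? " with grant option" : ""));
          }
        }
      }
    }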
Added: hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java?rev=1664220&view=auto
==============================================================================
--- hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java (added)
+++ hive/branches/hbase-metastore/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/hbase/TestStorageDescriptorSharing.java Thu Mar  5 03:46:28 2015
@@ -0,0 +1,159 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.client.HConnection;
+import org.apache.hadoop.hbase.client.HTableInterface;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Integration tests with the HBase mini-cluster, verifying that partitions share storage
+ * descriptors in HBaseStore.
+ */
+public class TestStorageDescriptorSharing {
+
+  private static final Log LOG = LogFactory.getLog(TestStorageDescriptorSharing.class.getName());
+
+  private static HBaseTestingUtility utility;
+  private static HTableInterface tblTable;
+  private static HTableInterface sdTable;
+  private static HTableInterface partTable;
+  private static HTableInterface dbTable;
+  private static HTableInterface roleTable;
+  private static HTableInterface globalPrivsTable;
+  private static HTableInterface principalRoleMapTable;
+  private static Map<String, String> emptyParameters = new HashMap<String, String>();
+
+  @Rule public ExpectedException thrown = ExpectedException.none();
+  @Mock private HConnection hconn;
+  private HBaseStore store;
+  private HiveConf conf;
+
+  @BeforeClass
+  public static void startMiniCluster() throws Exception {
+    utility = new HBaseTestingUtility();
+    utility.startMiniCluster();
+    byte[][] families = new byte[][] {HBaseReadWrite.CATALOG_CF, HBaseReadWrite.STATS_CF};
+    tblTable = utility.createTable(HBaseReadWrite.TABLE_TABLE.getBytes(HBaseUtils.ENCODING),
+        families);
+    sdTable = utility.createTable(HBaseReadWrite.SD_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    partTable = utility.createTable(HBaseReadWrite.PART_TABLE.getBytes(HBaseUtils.ENCODING),
+        families);
+    dbTable = utility.createTable(HBaseReadWrite.DB_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    roleTable = utility.createTable(HBaseReadWrite.ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+        HBaseReadWrite.CATALOG_CF);
+    globalPrivsTable =
+        utility.createTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
+    principalRoleMapTable =
+        utility.createTable(HBaseReadWrite.USER_TO_ROLE_TABLE.getBytes(HBaseUtils.ENCODING),
+            HBaseReadWrite.CATALOG_CF);
+  }
+
+  @AfterClass
+  public static void shutdownMiniCluster() throws Exception {
+    utility.shutdownMiniCluster();
+  }
+
+  @Before
+  public void setupConnection() throws IOException {
+    MockitoAnnotations.initMocks(this);
+    Mockito.when(hconn.getTable(HBaseReadWrite.SD_TABLE)).thenReturn(sdTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.TABLE_TABLE)).thenReturn(tblTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.PART_TABLE)).thenReturn(partTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.DB_TABLE)).thenReturn(dbTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.ROLE_TABLE)).thenReturn(roleTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.GLOBAL_PRIVS_TABLE)).thenReturn(globalPrivsTable);
+    Mockito.when(hconn.getTable(HBaseReadWrite.USER_TO_ROLE_TABLE)).thenReturn(principalRoleMapTable);
+    conf = new HiveConf();
+    // Turn off caching, as we want to test actual interaction with HBase
+    conf.setBoolean(HBaseReadWrite.NO_CACHE_CONF, true);
+    HBaseReadWrite hbase = HBaseReadWrite.getInstance(conf);
+    hbase.setConnection(hconn);
+    store = new HBaseStore();
+    store.setConf(conf);
+  }
+
+  @Test
+  public void createManyPartitions() throws Exception {
+    String dbName = "default";
+    String tableName = "manyParts";
+    int startTime = (int)(System.currentTimeMillis() / 1000);
+    List<FieldSchema> cols = new ArrayList<FieldSchema>();
+    cols.add(new FieldSchema("col1", "int", "nocomment"));
+    SerDeInfo serde = new SerDeInfo("serde", "seriallib", null);
+    StorageDescriptor sd = new StorageDescriptor(cols, "file:/tmp", "input", "output", false, 0,
+        serde, null, null, emptyParameters);
+    List<FieldSchema> partCols = new ArrayList<FieldSchema>();
+    partCols.add(new FieldSchema("pc", "string", ""));
+    Table table = new Table(tableName, dbName, "me", startTime, startTime, 0, sd, partCols,
+        emptyParameters, null, null, null);
+    store.createTable(table);
+
+    List<String> partVals = Arrays.asList("alan", "bob", "carl", "doug", "ethan");
+    for (String val : partVals) {
+      List<String> vals = new ArrayList<String>();
+      vals.add(val);
+      StorageDescriptor psd = new StorageDescriptor(sd);
+      psd.setLocation("file:/tmp/pc=" + val);
+      Partition part = new Partition(vals, dbName, tableName, startTime, startTime, psd,
+          emptyParameters);
+      store.addPartition(part);
+
+      Partition p = store.getPartition(dbName, tableName, vals);
+      Assert.assertEquals("file:/tmp/pc=" + val, p.getSd().getLocation());
+    }
+
+    Assert.assertEquals(1, HBaseReadWrite.getInstance(conf).countStorageDescriptor());
+
+    sd = new StorageDescriptor(cols, "file:/tmp", "input2", "output", false, 0,
+        serde, null, null, emptyParameters);
Table("differenttable", "default", "me", startTime, startTime, 0, sd, null, + emptyParameters, null, null, null); + store.createTable(table); + + Assert.assertEquals(2, HBaseReadWrite.getInstance(conf).countStorageDescriptor()); + + } +} Modified: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java?rev=1664220&r1=1664219&r2=1664220&view=diff ============================================================================== --- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java (original) +++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/DatabaseWritable.java Thu Mar 5 03:46:28 2015 @@ -47,7 +47,7 @@ class DatabaseWritable implements Writab HBaseUtils.writeStrStrMap(out, db.getParameters()); HBaseUtils.writePrivileges(out, db.getPrivileges()); HBaseUtils.writeStr(out, db.getOwnerName()); - HBaseUtils.writePrincipalType(out, db.getOwnerType()); + HBaseUtils.writeEnum(out, db.getOwnerType()); } @Override Added: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java?rev=1664220&view=auto ============================================================================== --- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java (added) +++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java Thu Mar 5 03:46:28 2015 @@ -0,0 +1,70 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.hadoop.hive.metastore.hbase; + +import org.apache.hadoop.io.Writable; + +import java.io.DataInput; +import java.io.DataOutput; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * A class to serialize a list of grant infos. There is not a corresponding thrift object. 
Added: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java?rev=1664220&view=auto
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java (added)
+++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoList.java Thu Mar  5 03:46:28 2015
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A class to serialize a list of grant infos.  There is not a corresponding thrift object.
+ */
+public class GrantInfoList implements Writable {
+  List<GrantInfoWritable> grantInfos;
+
+  GrantInfoList() {
+    grantInfos = new ArrayList<GrantInfoWritable>();
+  }
+
+  GrantInfoList(List<GrantInfoWritable> infos) {
+    grantInfos = infos;
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    if (grantInfos == null) {
+      out.writeInt(0);
+    } else {
+      out.writeInt(grantInfos.size());
+      for (GrantInfoWritable info : grantInfos) {
+        info.write(out);
+      }
+    }
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    int size = in.readInt();
+    if (size == 0) {
+      grantInfos = new ArrayList<GrantInfoWritable>();
+    } else {
+      grantInfos = new ArrayList<GrantInfoWritable>(size);
+      for (int i = 0; i < size; i++) {
+        GrantInfoWritable info = new GrantInfoWritable();
+        info.readFields(in);
+        grantInfos.add(info);
+      }
+    }
+  }
+}
Added: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java?rev=1664220&view=auto
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java (added)
+++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/GrantInfoWritable.java Thu Mar  5 03:46:28 2015
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.metastore.hbase;
+
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.io.Writable;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+/**
+ * A class to serialize grant information.  There is not a corresponding thrift object.
+ */
+class GrantInfoWritable implements Writable {
+  String principalName;
+  PrincipalType principalType;
+  int addTime;
+  String grantor;
+  PrincipalType grantorType;
+  boolean grantOption;
+
+  GrantInfoWritable() {
+  }
+
+  /**
+   * @param name name of the user or role
+   * @param type whether this is a user or a role
+   * @param addTime time the principal was added to the role
+   * @param grantor user or role who granted this principal into the role
+   * @param grantorType whether the grantor was a user or a role
+   * @param withGrantOption whether this principal has the grant option
+   */
+  GrantInfoWritable(String name, PrincipalType type, int addTime, String grantor,
+      PrincipalType grantorType, boolean withGrantOption) {
+    principalName = name;
+    principalType = type;
+    this.addTime = addTime;
+    this.grantor = grantor;
+    this.grantorType = grantorType;
+    grantOption = withGrantOption;
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+    HBaseUtils.writeStr(out, principalName);
+    out.writeInt(principalType.getValue());
+    out.writeInt(addTime);
+    HBaseUtils.writeStr(out, grantor);
+    out.writeInt(grantorType.getValue());
+    out.writeBoolean(grantOption);
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+    principalName = HBaseUtils.readStr(in);
+    principalType = PrincipalType.findByValue(in.readInt());
+    addTime = in.readInt();
+    grantor = HBaseUtils.readStr(in);
+    grantorType = PrincipalType.findByValue(in.readInt());
+    grantOption = in.readBoolean();
+  }
+}
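Both new classes are plain Hadoop Writables, so byte-level round-tripping is just DataOutput/DataInput plumbing. One behavior worth noting: GrantInfoList.write emits size 0 for a null list and readFields turns size 0 into an empty list, so a null grantInfos normalizes to an empty list across a round trip. An illustrative check (not part of the commit):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class GrantInfoRoundTripSketch {
      // Serialize a GrantInfoList and read it back through the Writable API.
      static GrantInfoList roundTrip(GrantInfoList original) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buf));
        GrantInfoList copy = new GrantInfoList();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(buf.toByteArray())));
        return copy;   // a null grantInfos comes back as an empty list
      }
    }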
Modified: hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java
URL: http://svn.apache.org/viewvc/hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java?rev=1664220&r1=1664219&r2=1664220&view=diff
==============================================================================
--- hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java (original)
+++ hive/branches/hbase-metastore/metastore/src/java/org/apache/hadoop/hive/metastore/hbase/HBaseReadWrite.java Thu Mar  5 03:46:28 2015
@@ -19,7 +19,6 @@
 package org.apache.hadoop.hive.metastore.hbase;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -49,10 +48,15 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.Role;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.io.Writable;
 
+import java.io.DataInput;
+import java.io.DataOutput;
 import java.io.IOException;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
@@ -62,9 +66,11 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Deque;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
@@ -73,21 +79,23 @@ import java.util.Map;
 class HBaseReadWrite {
 
   @VisibleForTesting final static String DB_TABLE = "DBS";
+  @VisibleForTesting final static String GLOBAL_PRIVS_TABLE = "GLOBAL_PRIVS";
   @VisibleForTesting final static String PART_TABLE = "PARTITIONS";
   @VisibleForTesting final static String ROLE_TABLE = "ROLES";
   @VisibleForTesting final static String SD_TABLE = "SDS";
   @VisibleForTesting final static String TABLE_TABLE = "TBLS";
+  @VisibleForTesting final static String USER_TO_ROLE_TABLE = "USER_TO_ROLE";
   @VisibleForTesting final static byte[] CATALOG_CF = "c".getBytes(HBaseUtils.ENCODING);
   @VisibleForTesting final static byte[] STATS_CF = "s".getBytes(HBaseUtils.ENCODING);
   @VisibleForTesting final static String NO_CACHE_CONF = "no.use.cache";
   private final static byte[] CATALOG_COL = "cat".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] ROLES_COL = "roles".getBytes(HBaseUtils.ENCODING);
   private final static byte[] REF_COUNT_COL = "ref".getBytes(HBaseUtils.ENCODING);
+  private final static byte[] GLOBAL_PRIVS_KEY = "globalprivs".getBytes(HBaseUtils.ENCODING);
   private final static int TABLES_TO_CACHE = 10;
 
-  // TODO Add privileges as a second column in the CATALOG_CF
-
-  private final static String[] tableNames = { DB_TABLE, PART_TABLE, ROLE_TABLE, SD_TABLE,
-                                               TABLE_TABLE };
+  private final static String[] tableNames = { DB_TABLE, GLOBAL_PRIVS_TABLE, PART_TABLE,
+                                               USER_TO_ROLE_TABLE, ROLE_TABLE, SD_TABLE,
+                                               TABLE_TABLE };
 
   static final private Log LOG = LogFactory.getLog(HBaseReadWrite.class.getName());
 
   private static ThreadLocal<HBaseReadWrite> self = new ThreadLocal<HBaseReadWrite>() {
@@ -121,6 +129,11 @@ class HBaseReadWrite {
   private Counter sdMisses;
   private Counter sdOverflows;
   private List<Counter> counters;
+  // roleCache doesn't use ObjectCache because I don't want to limit the size.  I am assuming
+  // that the number of roles will always be small (< 100) so caching the whole thing should not
+  // be painful.
+  private Map<String, GrantInfoList> roleCache;
+  boolean entireRoleTableInCache;
 
   /**
    * Get the instance of HBaseReadWrite for the current thread.  This is intended to be used by
@@ -199,6 +212,9 @@ class HBaseReadWrite {
       partCache = new PartitionCache(totalObjectsToCache / 2, partHits, partMisses, partOverflows);
       statsCache = StatsCache.getInstance(conf);
    }
+
+    roleCache = new HashMap<String, GrantInfoList>();
+    entireRoleTableInCache = false;
   }
 
   // Synchronize this so not everyone's doing it at once.
@@ -222,6 +238,10 @@ class HBaseReadWrite {
     }
   }
 
+  /**********************************************************************************************
+   * Transaction related methods
+   *********************************************************************************************/
+
   /**
    * Begin a transaction
    */
@@ -245,6 +265,10 @@ class HBaseReadWrite {
     conn.close();
   }
 
+  /**********************************************************************************************
+   * Database related methods
+   *********************************************************************************************/
+
   /**
    * Fetch a database object
    * @param name name of the database to fetch
@@ -307,6 +331,37 @@ class HBaseReadWrite {
     flush();
   }
 
+  /**********************************************************************************************
+   * Global privilege related methods
+   *********************************************************************************************/
+
+  /**
+   * Fetch the global privileges object
+   * @return the global privileges, or null if none have been stored
+   * @throws IOException
+   */
+  PrincipalPrivilegeSet getGlobalPrivs() throws IOException {
+    byte[] key = GLOBAL_PRIVS_KEY;
+    byte[] serialized = read(GLOBAL_PRIVS_TABLE, key, CATALOG_CF, CATALOG_COL);
+    if (serialized == null) return null;
+    return HBaseUtils.readPrivileges(serialized);
+  }
+
+  /**
+   * Store the global privileges object
+   * @throws IOException
+   */
+  void putGlobalPrivs(PrincipalPrivilegeSet privs) throws IOException {
+    byte[] key = GLOBAL_PRIVS_KEY;
+    byte[] serialized = HBaseUtils.writePrivileges(privs);
+    store(GLOBAL_PRIVS_TABLE, key, CATALOG_CF, CATALOG_COL, serialized);
+    flush();
+  }
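getGlobalPrivs and putGlobalPrivs keep the entire global privilege set in one row under a fixed key, so every update is a read-modify-write of a single object. The removeRoleGrants method later in this file uses exactly this pattern; in isolation the step looks like the following sketch (rw stands for the thread's HBaseReadWrite instance, roleName for the role being dropped):

    // Drop all global privileges held by a role, then write the set back.
    PrincipalPrivilegeSet global = rw.getGlobalPrivs();
    if (global != null && global.getRolePrivileges() != null
        && global.getRolePrivileges().remove(roleName) != null) {
      rw.putGlobalPrivs(global);   // rewrites the whole set under the fixed key
    }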
+
+  /**********************************************************************************************
+   * Partition related methods
+   *********************************************************************************************/
+
   /**
    * Fetch one partition
    * @param dbName database table is in
@@ -483,6 +538,335 @@ class HBaseReadWrite {
     flush();
   }
 
+  private Partition getPartition(String dbName, String tableName, List<String> partVals,
+      boolean populateCache) throws IOException {
+    Partition cached = partCache.get(dbName, tableName, partVals);
+    if (cached != null) return cached;
+    byte[] key = buildPartitionKey(dbName, tableName, partVals);
+    byte[] serialized = read(PART_TABLE, key, CATALOG_CF, CATALOG_COL);
+    if (serialized == null) return null;
+    PartitionWritable part = new PartitionWritable();
+    HBaseUtils.deserialize(part, serialized);
+    if (populateCache) partCache.put(dbName, tableName, part.part);
+    return part.part;
+  }
+
+  private List<Partition> scanPartitions(byte[] keyPrefix, byte[] colFam, byte[] colName,
+      int maxResults) throws IOException {
+    return scanPartitionsWithFilter(keyPrefix, colFam, colName, maxResults, null);
+  }
+
+  private List<Partition> scanPartitionsWithFilter(byte[] keyPrefix, byte[] colFam,
+      byte[] colName, int maxResults, Filter filter)
+      throws IOException {
+    Iterator<Result> iter =
+        scanWithFilter(PART_TABLE, keyPrefix, colFam, colName, filter);
+    List<Partition> parts = new ArrayList<Partition>();
+    int numToFetch = maxResults < 0 ? Integer.MAX_VALUE : maxResults;
+    for (int i = 0; i < numToFetch && iter.hasNext(); i++) {
+      PartitionWritable p = new PartitionWritable();
+      HBaseUtils.deserialize(p, iter.next().getValue(colFam, colName));
+      parts.add(p.part);
+    }
+    return parts;
+  }
+
+  private byte[] buildPartitionKey(String dbName, String tableName, List<String> partVals) {
+    Deque<String> keyParts = new ArrayDeque<String>(partVals);
+    keyParts.addFirst(tableName);
+    keyParts.addFirst(dbName);
+    return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()]));
+  }
+
+  private byte[] buildPartitionKey(PartitionWritable part) throws IOException {
+    Deque<String> keyParts = new ArrayDeque<String>(part.part.getValues());
+    keyParts.addFirst(part.part.getTableName());
+    keyParts.addFirst(part.part.getDbName());
+    return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()]));
+  }
+
+  /**********************************************************************************************
+   * Role related methods
+   *********************************************************************************************/
+
+  /**
+   * Fetch the list of all roles for a user
+   * @param userName name of the user
+   * @return the list of all roles this user participates in
+   * @throws IOException
+   */
+  List<String> getUserRoles(String userName) throws IOException {
+    byte[] key = HBaseUtils.buildKey(userName);
+    byte[] serialized = read(USER_TO_ROLE_TABLE, key, CATALOG_CF, CATALOG_COL);
+    if (serialized == null) return null;
+    RoleList roles = new RoleList();
+    HBaseUtils.deserialize(roles, serialized);
+    return roles.roles;
+  }
+
+  /**
+   * Find all roles directly participated in by a given principal.  This builds the role cache
+   * because it assumes that subsequent calls may be made to find roles participated in
+   * indirectly.
+   * @param name username or role name
+   * @param type user or role
+   * @return map of role name to grant info for all roles directly participated in
+   */
+  Map<String, GrantInfoWritable> getPrincipalDirectRoles(String name, PrincipalType type)
+      throws IOException {
+    buildRoleCache();
+
+    Map<String, GrantInfoWritable> directRoles = new HashMap<String, GrantInfoWritable>();
+    for (Map.Entry<String, GrantInfoList> e : roleCache.entrySet()) {
+      for (GrantInfoWritable giw : e.getValue().grantInfos) {
+        if (giw.principalType == type && giw.principalName.equals(name)) {
+          directRoles.put(e.getKey(), giw);
+          break;
+        }
+      }
+    }
+    return directRoles;
+  }
+
+  /**
+   * Fetch all roles and users included directly in a given role.
+   * @param roleName name of the principal
+   * @return a list of all roles included in this role
+   * @throws IOException
+   */
+  GrantInfoList getRolePrincipals(String roleName) throws IOException, NoSuchObjectException {
+    GrantInfoList rolePrincipals = roleCache.get(roleName);
+    if (rolePrincipals != null) return rolePrincipals;
+    byte[] key = HBaseUtils.buildKey(roleName);
+    byte[] serialized = read(ROLE_TABLE, key, CATALOG_CF, ROLES_COL);
+    if (serialized == null) return null;
+    rolePrincipals = new GrantInfoList();
+    HBaseUtils.deserialize(rolePrincipals, serialized);
+    roleCache.put(roleName, rolePrincipals);
+    return rolePrincipals;
+  }
+
+  /**
+   * Given a role, find all users who directly or indirectly participate in it.
+   * This is expensive; it should be used sparingly.  It scans the entire userToRole table and
+   * does a linear search on each entry.
+   * @param roleName name of the role
+   * @return set of all users in the role
+   * @throws IOException
+   * @throws NoSuchObjectException
+   */
+  Set<String> findAllUsersInRole(String roleName) throws IOException {
+    // Walk the userToRole table and collect every user that matches this role.
+    Set<String> users = new HashSet<String>();
+    Iterator<Result> iter =
+        scanWithFilter(USER_TO_ROLE_TABLE, null, CATALOG_CF, CATALOG_COL, null);
+    while (iter.hasNext()) {
+      RoleList roleList = new RoleList();
+      Result result = iter.next();
+      HBaseUtils.deserialize(roleList, result.getValue(CATALOG_CF, CATALOG_COL));
+      for (String rn : roleList.roles) {
+        if (rn.equals(roleName)) {
+          users.add(new String(result.getRow(), HBaseUtils.ENCODING));
+          break;
+        }
+      }
+    }
+    return users;
+  }
+
+  /**
+   * Add a principal to a role.
+   * @param roleName name of the role to add the principal to
+   * @param grantInfo grant information for this principal
+   * @throws java.io.IOException
+   * @throws NoSuchObjectException
+   */
+  void addPrincipalToRole(String roleName, GrantInfoWritable grantInfo)
+      throws IOException, NoSuchObjectException {
+    GrantInfoList rolePrincipals = getRolePrincipals(roleName);
+    if (rolePrincipals == null) {
+      // Happens the first time a principal is added to a role
+      rolePrincipals = new GrantInfoList();
+    }
+    rolePrincipals.grantInfos.add(grantInfo);
+    byte[] key = HBaseUtils.buildKey(roleName);
+    byte[] serialized = HBaseUtils.serialize(rolePrincipals);
+    store(ROLE_TABLE, key, CATALOG_CF, ROLES_COL, serialized);
+    flush();
+    roleCache.put(roleName, rolePrincipals);
+  }
+
+  /**
+   * Drop a principal from a role.
+   * @param roleName name of the role to drop the principal from
+   * @param principalName name of the principal to drop from the role
+   * @param type user or role
+   * @param grantOnly if true, just remove the grant option; don't actually remove the
+   *                  principal from the role
+   * @throws NoSuchObjectException
+   * @throws IOException
+   */
+  void dropPrincipalFromRole(String roleName, String principalName, PrincipalType type,
+      boolean grantOnly)
+      throws NoSuchObjectException, IOException {
+    GrantInfoList rolePrincipals = getRolePrincipals(roleName);
+    if (rolePrincipals == null) {
+      // Means there aren't any principals in this role, so probably not a problem.
+      return;
+    }
+    for (int i = 0; i < rolePrincipals.grantInfos.size(); i++) {
+      if (rolePrincipals.grantInfos.get(i).principalType == type &&
+          rolePrincipals.grantInfos.get(i).principalName.equals(principalName)) {
+        if (grantOnly) rolePrincipals.grantInfos.get(i).grantOption = false;
+        else rolePrincipals.grantInfos.remove(i);
+        break;
+      }
+    }
+    byte[] key = HBaseUtils.buildKey(roleName);
+    byte[] serialized = HBaseUtils.serialize(rolePrincipals);
+    store(ROLE_TABLE, key, CATALOG_CF, ROLES_COL, serialized);
+    flush();
+    roleCache.put(roleName, rolePrincipals);
+  }
+
+  /**
+   * Rebuild the row for a given user in the USER_TO_ROLE table.  This is expensive.  It
+   * should be called as infrequently as possible.
+   * @param userName name of the user
+   * @throws IOException
+   */
+  void buildRoleMapForUser(String userName) throws IOException, NoSuchObjectException {
+    // This is mega ugly.  Hopefully we don't have to do this too often.
+    // First, scan the role table and put it all in memory.
+    buildRoleCache();
+    LOG.debug("Building role map for " + userName);
+
+    // Second, find every role the user participates in directly.
+    Set<String> rolesToAdd = new HashSet<String>();
+    Set<String> userSet = new HashSet<String>();
+    Set<String> rolesToCheckNext = new HashSet<String>();
+    userSet.add(userName);
+    for (Map.Entry<String, GrantInfoList> e : roleCache.entrySet()) {
+      for (GrantInfoWritable grantInfo : e.getValue().grantInfos) {
+        if (grantInfo.principalType == PrincipalType.USER &&
+            userName.equals(grantInfo.principalName)) {
+          rolesToAdd.add(e.getKey());
+          rolesToCheckNext.add(e.getKey());
+          LOG.debug("Adding " + e.getKey() + " to list of roles user is in directly");
+          break;
+        }
+      }
+    }
+
+    // Third, find every role the user participates in indirectly (that is, they have been
+    // granted into role X and role Y has been granted into role X).
+    while (rolesToCheckNext.size() > 0) {
+      Set<String> tmpRolesToCheckNext = new HashSet<String>();
+      for (String roleName : rolesToCheckNext) {
+        GrantInfoList grantInfos = roleCache.get(roleName);
+        if (grantInfos == null) continue;  // happens when a role contains no grants
+        for (GrantInfoWritable grantInfo : grantInfos.grantInfos) {
+          if (grantInfo.principalType == PrincipalType.ROLE &&
+              rolesToAdd.add(grantInfo.principalName)) {
+            tmpRolesToCheckNext.add(grantInfo.principalName);
+            LOG.debug("Adding " + grantInfo.principalName +
+                " to list of roles user is in indirectly");
+          }
+        }
+      }
+      rolesToCheckNext = tmpRolesToCheckNext;
+    }
+
+    byte[] key = HBaseUtils.buildKey(userName);
+    byte[] serialized = HBaseUtils.serialize(new RoleList(new ArrayList<String>(rolesToAdd)));
+    store(USER_TO_ROLE_TABLE, key, CATALOG_CF, CATALOG_COL, serialized);
+    flush();
+  }
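Stripped of the HBase plumbing, buildRoleMapForUser computes a fixpoint: start from the roles that name the user directly, then repeatedly add any role granted into a role already held. A self-contained sketch of the same closure over plain maps (illustrative only, not the committed code):

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class RoleClosureSketch {
      // userGrants: role -> users granted into it.  roleGrants: role -> roles
      // granted into it.  A user who holds role X also holds every role granted
      // into X, transitively, which is the fixpoint buildRoleMapForUser stores.
      static Set<String> rolesFor(String user, Map<String, Set<String>> userGrants,
          Map<String, Set<String>> roleGrants) {
        Set<String> found = new HashSet<String>();
        Deque<String> toCheck = new ArrayDeque<String>();
        for (Map.Entry<String, Set<String>> e : userGrants.entrySet()) {
          if (e.getValue().contains(user)) {     // direct membership
            found.add(e.getKey());
            toCheck.add(e.getKey());
          }
        }
        while (!toCheck.isEmpty()) {             // roles granted into held roles
          Set<String> nested = roleGrants.get(toCheck.poll());
          if (nested == null) continue;
          for (String r : nested) {
            if (found.add(r)) toCheck.add(r);
          }
        }
        return found;
      }
    }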
+
   /**
    * Fetch a role
    * @param roleName name of the role

@@ -535,8 +919,48 @@ class HBaseReadWrite {
     byte[] key = HBaseUtils.buildKey(roleName);
     delete(ROLE_TABLE, key, null, null);
     flush();
+    roleCache.remove(roleName);
   }
 
+  private static class RoleList implements Writable {
+    List<String> roles;
+
+    RoleList() {
+    }
+
+    RoleList(List<String> r) {
+      roles = r;
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+      HBaseUtils.writeStrList(out, roles);
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+      roles = HBaseUtils.readStrList(in);
+    }
+  }
+
+  private void buildRoleCache() throws IOException {
+    if (!entireRoleTableInCache) {
+      Iterator<Result> roles = scanWithFilter(ROLE_TABLE, null, CATALOG_CF, ROLES_COL, null);
+      while (roles.hasNext()) {
+        Result res = roles.next();
+        String roleName = new String(res.getRow(), HBaseUtils.ENCODING);
+        GrantInfoList grantInfos = new GrantInfoList();
+        HBaseUtils.deserialize(grantInfos, res.getValue(CATALOG_CF, ROLES_COL));
+        roleCache.put(roleName, grantInfos);
+      }
+      entireRoleTableInCache = true;
+    }
+  }
+
+  /**********************************************************************************************
+   * Table related methods
+   *********************************************************************************************/
+
   /**
    * Fetch a table object
    * @param dbName database the table is in

@@ -652,6 +1076,24 @@ class HBaseReadWrite {
     flush();
   }
 
+  private Table getTable(String dbName, String tableName, boolean populateCache)
+      throws IOException {
+    ObjectPair<String, String> hashKey = new ObjectPair<String, String>(dbName, tableName);
+    Table cached = tableCache.get(hashKey);
+    if (cached != null) return cached;
+    byte[] key = HBaseUtils.buildKey(dbName, tableName);
+    byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL);
+    if (serialized == null) return null;
+    TableWritable table = new TableWritable();
+    HBaseUtils.deserialize(table, serialized);
+    if (populateCache) tableCache.put(hashKey, table.table);
+    return table.table;
+  }
+
+  /**********************************************************************************************
+   * StorageDescriptor related methods
+   *********************************************************************************************/
+
   /**
    * If this serde has already been read, then return it from the cache. If not, read it, then
    * return it.
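RoleList persists itself through HBaseUtils.writeStrList and readStrList, which this
diff does not show. A self-contained round-trip sketch assuming a straightforward
length-prefixed encoding (StringListCodec is an illustrative name, and the actual
HBaseUtils wire format may differ):

    import java.io.*;
    import java.util.*;

    public class StringListCodec {
      static byte[] writeStrList(List<String> strs) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(buf);
        out.writeInt(strs.size());              // length prefix
        for (String s : strs) out.writeUTF(s);  // then each entry
        return buf.toByteArray();
      }

      static List<String> readStrList(byte[] serialized) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(serialized));
        int n = in.readInt();
        List<String> strs = new ArrayList<String>(n);
        for (int i = 0; i < n; i++) strs.add(in.readUTF());
        return strs;
      }

      public static void main(String[] args) throws IOException {
        System.out.println(readStrList(writeStrList(Arrays.asList("admin", "etl"))));
        // prints [admin, etl]
      }
    }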
@@ -741,6 +1183,32 @@ class HBaseReadWrite {
     throw new IOException("Too many unsuccessful attempts to increment storage counter");
   }
 
+  private static class ByteArrayWrapper {
+    byte[] wrapped;
+
+    ByteArrayWrapper(byte[] b) {
+      wrapped = b;
+    }
+
+    @Override
+    public boolean equals(Object other) {
+      if (other instanceof ByteArrayWrapper) {
+        return Arrays.equals(((ByteArrayWrapper)other).wrapped, wrapped);
+      } else {
+        return false;
+      }
+    }
+
+    @Override
+    public int hashCode() {
+      return Arrays.hashCode(wrapped);
+    }
+  }
+
+  /**********************************************************************************************
+   * Statistics related methods
+   *********************************************************************************************/
+
   /**
    * Update statistics for one or more columns for a table or a partition.
    * @param dbName database the table is in

@@ -915,6 +1383,34 @@ class HBaseReadWrite {
     return statsList;
   }
 
+  private static class PartStatsInfo {
+    ColumnStatistics stats;
+    String partName;
+    List<String> colNames;
+    List<String> partVals;
+    byte[][] colKeys;
+
+    PartStatsInfo(ColumnStatistics s, List<String> pv, String pn) {
+      stats = s; partVals = pv; partName = pn;
+      colNames = new ArrayList<String>();
+      colKeys = null;
+    }
+  }
+
+  private byte[] getStatisticsKey(String dbName, String tableName, List<String> partVals) {
+    return partVals == null ?
+        HBaseUtils.buildKey(dbName, tableName) :
+        buildPartitionKey(dbName, tableName, partVals);
+  }
+
+  private String getStatisticsTable(List<String> partVals) {
+    return partVals == null ? TABLE_TABLE : PART_TABLE;
+  }
+
+  /**********************************************************************************************
+   * Cache methods
+   *********************************************************************************************/
+
   /**
    * This should be called whenever a new query is started.
    */
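ByteArrayWrapper exists because Java arrays inherit identity-based equals and hashCode
from Object, so a raw byte[] map key can only be found again through the exact same array
instance. Delegating to Arrays.equals and Arrays.hashCode restores value semantics. A
quick demonstration (Wrapper below is a stand-in for ByteArrayWrapper):

    import java.util.*;

    public class ByteKeyDemo {
      static class Wrapper {
        final byte[] b;
        Wrapper(byte[] b) { this.b = b; }
        @Override public boolean equals(Object o) {
          return o instanceof Wrapper && Arrays.equals(((Wrapper) o).b, b);
        }
        @Override public int hashCode() { return Arrays.hashCode(b); }
      }

      public static void main(String[] args) {
        Map<byte[], String> raw = new HashMap<byte[], String>();
        raw.put("sd1".getBytes(), "descriptor");
        // Equal content but a different instance: identity hashCode misses.
        System.out.println(raw.get("sd1".getBytes()));  // null

        Map<Wrapper, String> wrapped = new HashMap<Wrapper, String>();
        wrapped.put(new Wrapper("sd1".getBytes()), "descriptor");
        System.out.println(wrapped.get(new Wrapper("sd1".getBytes())));  // descriptor
      }
    }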
@@ -926,42 +1422,17 @@ class HBaseReadWrite {
     tableCache.flush();
     sdCache.flush();
     partCache.flush();
+    flushRoleCache();
   }
 
-  @VisibleForTesting
-  int countStorageDescriptor() throws IOException {
-    ResultScanner scanner = getHTable(SD_TABLE).getScanner(new Scan());
-    int cnt = 0;
-    while (scanner.next() != null) cnt++;
-    return cnt;
+  private void flushRoleCache() {
+    roleCache.clear();
+    entireRoleTableInCache = false;
   }
 
-  private Table getTable(String dbName, String tableName, boolean populateCache)
-      throws IOException {
-    ObjectPair<String, String> hashKey = new ObjectPair<String, String>(dbName, tableName);
-    Table cached = tableCache.get(hashKey);
-    if (cached != null) return cached;
-    byte[] key = HBaseUtils.buildKey(dbName, tableName);
-    byte[] serialized = read(TABLE_TABLE, key, CATALOG_CF, CATALOG_COL);
-    if (serialized == null) return null;
-    TableWritable table = new TableWritable();
-    HBaseUtils.deserialize(table, serialized);
-    if (populateCache) tableCache.put(hashKey, table.table);
-    return table.table;
-  }
-
-  private Partition getPartition(String dbName, String tableName, List<String> partVals,
-      boolean populateCache) throws IOException {
-    Partition cached = partCache.get(dbName, tableName, partVals);
-    if (cached != null) return cached;
-    byte[] key = buildPartitionKey(dbName, tableName, partVals);
-    byte[] serialized = read(PART_TABLE, key, CATALOG_CF, CATALOG_COL);
-    if (serialized == null) return null;
-    PartitionWritable part = new PartitionWritable();
-    HBaseUtils.deserialize(part, serialized);
-    if (populateCache) partCache.put(dbName, tableName, part.part);
-    return part.part;
-  }
+  /**********************************************************************************************
+   * General access methods
+   *********************************************************************************************/
 
   private void store(String table, byte[] key, byte[] colFam, byte[] colName, byte[] obj)
       throws IOException {

@@ -1008,26 +1479,6 @@ class HBaseReadWrite {
     htab.delete(d);
   }
 
-  private List<Partition> scanPartitions(byte[] keyPrefix, byte[] colFam, byte[] colName,
-      int maxResults) throws IOException {
-    return scanPartitionsWithFilter(keyPrefix, colFam, colName, maxResults, null);
-  }
-
-  private List<Partition> scanPartitionsWithFilter(byte[] keyPrefix, byte[] colFam,
-      byte[] colName, int maxResults, Filter filter)
-      throws IOException {
-    Iterator<Result> iter =
-        scanWithFilter(PART_TABLE, keyPrefix, colFam, colName, filter);
-    List<Partition> parts = new ArrayList<Partition>();
-    int numToFetch = maxResults < 0 ?
-        Integer.MAX_VALUE : maxResults;
-    for (int i = 0; i < numToFetch && iter.hasNext(); i++) {
-      PartitionWritable p = new PartitionWritable();
-      HBaseUtils.deserialize(p, iter.next().getValue(colFam, colName));
-      parts.add(p.part);
-    }
-    return parts;
-  }
-
   private Iterator<Result> scanWithFilter(String table, byte[] keyPrefix, byte[] colFam,
       byte[] colName, Filter filter) throws IOException {
     HTableInterface htab = getHTable(table);
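flushRoleCache pairs the role map with the entireRoleTableInCache flag: buildRoleCache
loads the whole table once, the flag short-circuits later loads, and flushing resets
both so the next query rescans. The same pattern in isolation (FlagGuardedCache and
Loader are hypothetical names, not types in this patch):

    import java.util.*;

    public class FlagGuardedCache<K, V> {
      interface Loader<K, V> { Map<K, V> loadAll(); }

      private final Map<K, V> cache = new HashMap<K, V>();
      private final Loader<K, V> loader;   // in the patch, an HBase full-table scan
      private boolean entireTableInCache = false;

      FlagGuardedCache(Loader<K, V> loader) { this.loader = loader; }

      V get(K key) {
        if (!entireTableInCache) {         // mirrors buildRoleCache()
          cache.putAll(loader.loadAll());
          entireTableInCache = true;
        }
        return cache.get(key);
      }

      void flush() {                       // mirrors flushRoleCache()
        cache.clear();
        entireTableInCache = false;
      }

      public static void main(String[] args) {
        FlagGuardedCache<String, String> c = new FlagGuardedCache<String, String>(
            new Loader<String, String>() {
              public Map<String, String> loadAll() {
                return Collections.singletonMap("admin", "grant-list");
              }
            });
        System.out.println(c.get("admin"));  // grant-list
      }
    }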
@@ -1069,33 +1520,21 @@ class HBaseReadWrite {
     for (HTableInterface htab : tables.values()) htab.flushCommits();
   }
 
-  private byte[] buildPartitionKey(String dbName, String tableName, List<String> partVals) {
-    Deque<String> keyParts = new ArrayDeque<String>(partVals);
-    keyParts.addFirst(tableName);
-    keyParts.addFirst(dbName);
-    return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()]));
-  }
-
-  private byte[] buildPartitionKey(PartitionWritable part) throws IOException {
-    Deque<String> keyParts = new ArrayDeque<String>(part.part.getValues());
-    keyParts.addFirst(part.part.getTableName());
-    keyParts.addFirst(part.part.getDbName());
-    return HBaseUtils.buildKey(keyParts.toArray(new String[keyParts.size()]));
-  }
-
   private byte[] hash(byte[] serialized) throws IOException {
     md.update(serialized);
     return md.digest();
   }
 
-  private byte[] getStatisticsKey(String dbName, String tableName, List<String> partVals) {
-    return partVals == null ?
-        HBaseUtils.buildKey(dbName, tableName) :
-        buildPartitionKey(dbName, tableName, partVals);
-  }
+  /**********************************************************************************************
+   * Testing methods and classes
+   *********************************************************************************************/
 
-  private String getStatisticsTable(List<String> partVals) {
-    return partVals == null ? TABLE_TABLE : PART_TABLE;
+  @VisibleForTesting
+  int countStorageDescriptor() throws IOException {
+    ResultScanner scanner = getHTable(SD_TABLE).getScanner(new Scan());
+    int cnt = 0;
+    while (scanner.next() != null) cnt++;
+    return cnt;
   }
 
   /**

@@ -1107,42 +1546,6 @@ class HBaseReadWrite {
     conn = connection;
   }
 
-  private static class ByteArrayWrapper {
-    byte[] wrapped;
-
-    ByteArrayWrapper(byte[] b) {
-      wrapped = b;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-      if (other instanceof ByteArrayWrapper) {
-        return Arrays.equals(((ByteArrayWrapper)other).wrapped, wrapped);
-      } else {
-        return false;
-      }
-    }
-
-    @Override
-    public int hashCode() {
-      return Arrays.hashCode(wrapped);
-    }
-  }
-
-  private static class PartStatsInfo {
-    ColumnStatistics stats;
-    String partName;
-    List<String> colNames;
-    List<String> partVals;
-    byte[][] colKeys;
-
-    PartStatsInfo(ColumnStatistics s, List<String> pv, String pn) {
-      stats = s; partVals = pv; partName = pn;
-      colNames = new ArrayList<String>();
-      colKeys = null;
-    }
-  }
-
   // For testing without the cache
   private static class BogusObjectCache extends ObjectCache {
     static Counter bogus = new Counter("bogus");