From: spena@apache.org
To: commits@sentry.apache.org
Reply-To: dev@sentry.apache.org
Date: Tue, 29 May 2018 18:06:20 -0000
Subject: [01/43] sentry git commit: SENTRY-2208: Refactor out Sentry service into own module from sentry-provider-db (Anthony Young-Garner, reviewed by Sergio Pena, Steve Moist, Na Li)

Repository: sentry
Updated Branches: refs/heads/master 97f666345 -> b97f5c7aa

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/SentryWebMetricParser.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/SentryWebMetricParser.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/SentryWebMetricParser.java new file mode 100644 index 0000000..8446d95 --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/SentryWebMetricParser.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.service.thrift; + +import org.codehaus.jackson.JsonNode; +import org.codehaus.jackson.map.ObjectMapper; + +import java.io.IOException; + + +/** + * The SentryWebMetricParser is used to parse the metrics displayed on the sentry web ui.
+ */ +public class SentryWebMetricParser { + private JsonNode root; + private final String sentryService = SentryService.class.getName(); + private ObjectMapper mapper; + + public SentryWebMetricParser(String response) throws IOException { + this.mapper = new ObjectMapper(); + this.root = mapper.readTree(response); + } + + public void refreshRoot(String response) throws IOException { + root = mapper.readTree(response); + } + + public JsonNode getRoot() { + return root; + } + + public JsonNode getGauges(JsonNode root) { + JsonNode gauges = root.findPath("gauges"); + return gauges; + } + + public JsonNode getCounters(JsonNode root) { + JsonNode counters = root.findPath("counters"); + return counters; + } + + public JsonNode getHistograms(JsonNode root) { + JsonNode histograms = root.findPath("histograms"); + return histograms; + } + + public JsonNode getMeters(JsonNode root) { + JsonNode meters = root.findPath("meters"); + return meters; + } + + public JsonNode getTimers(JsonNode root) { + JsonNode timers = root.findPath("timers"); + return timers; + } + + public JsonNode getValue(JsonNode node) { + return node.findPath("value"); + } + + public boolean isHA() { + JsonNode gauges = getGauges(root); + JsonNode obj = getValue(gauges.findPath(sentryService + ".is_ha")); + return obj.getValueAsBoolean(); + } + + public boolean isActive() { + JsonNode gauges = getGauges(root); + JsonNode obj = getValue(gauges.findPath(sentryService + ".is_active")); + return obj.getBooleanValue(); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java new file mode 100644 index 0000000..589acbe --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateInitializer.java @@ -0,0 +1,346 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.service.thrift; + +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.thrift.TException; +import org.junit.Assert; +import org.junit.Test; +import org.mockito.Mockito; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class TestFullUpdateInitializer { + + private static Configuration conf = new Configuration(); + + static { + conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig + .SENTRY_HDFS_SYNC_METASTORE_CACHE_MAX_PART_PER_RPC, 1); + conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig + .SENTRY_HDFS_SYNC_METASTORE_CACHE_MAX_TABLES_PER_RPC, 1); + conf.setInt(org.apache.sentry.hdfs.ServiceConstants.ServerConfig + .SENTRY_HDFS_SYNC_METASTORE_CACHE_INIT_THREADS, 8); + } + + /** + * Representation of a Hive table. A table has a name and a list of partitions. + */ + private static class HiveTable { + String name; + List partitions; + + HiveTable(String name) { + this.name = name; + this.partitions = new ArrayList<>(); + } + + HiveTable(String name, List partitions) { + this.name = name; + this.partitions = partitions; + if (this.partitions == null) { + this.partitions = new ArrayList<>(); + } + } + + HiveTable add(String partition) { + partitions.add(partition); + return this; + } + } + + /** + * Representation of a Hive database. A database has a name and a list of tables + */ + private static class HiveDb { + String name; + Collection tables; + + HiveDb(String name) { + this.name = name; + tables = new ArrayList<>(); + } + + HiveDb(String name, Collection tables) { + this.name = name; + this.tables = tables; + if (this.tables == null) { + this.tables = new ArrayList<>(); + } + } + + void add(HiveTable table) { + this.tables.add(table); + } + } + + /** + * Representation of a full Hive snapshot. A snapshot is collection of databases + */ + private static class HiveSnapshot { + List databases = new ArrayList<>(); + + HiveSnapshot() { + } + + HiveSnapshot(Collection dblist) { + if (dblist != null) { + databases.addAll(dblist); + } + } + + HiveSnapshot add(HiveDb db) { + this.databases.add(db); + return this; + } + } + + /** + * Convert Hive snapshot to mock client that will return proper values + * for the snapshot. 
+ */ + private static class MockClient { + HiveMetaStoreClient client; + + MockClient(HiveSnapshot snapshot) throws TException { + client = Mockito.mock(HiveMetaStoreClient.class); + List dbNames = new ArrayList<>(snapshot.databases.size()); + // Walk over all databases and mock appropriate objects + for (HiveDb mdb: snapshot.databases) { + String dbName = mdb.name; + dbNames.add(dbName); + Database db = makeDb(dbName); + Mockito.when(client.getDatabase(dbName)).thenReturn(db); + List tableNames = new ArrayList<>(mdb.tables.size()); + // Walk over all tables for the database and mock appropriate objects + for (HiveTable table: mdb.tables) { + String tableName = table.name; + tableNames.add(tableName); + Table mockTable = makeTable(dbName, tableName); + Mockito.when(client.getTableObjectsByName(dbName, + Lists.newArrayList(tableName))) + .thenReturn(Lists.newArrayList(mockTable)); + Mockito.when(client.listPartitionNames(dbName, tableName, (short) -1)) + .thenReturn(table.partitions); + // Walk across all partitions and mock appropriate objects + for (String partName: table.partitions) { + Partition p = makePartition(dbName, tableName, partName); + Mockito.when(client.getPartitionsByNames(dbName, tableName, + Lists.newArrayList(partName))) + .thenReturn(Lists.newArrayList(p)); + } + } + Mockito.when(client.getAllTables(dbName)).thenReturn(tableNames); + } + // Return all database names + Mockito.when(client.getAllDatabases()).thenReturn(dbNames); + } + } + + private static class MockHMSClientFactory implements HiveConnectionFactory { + + private final HiveMetaStoreClient mClient; + + private MockHMSClientFactory(MockClient mClient) { + this.mClient = mClient.client; + } + + private MockHMSClientFactory(HiveMetaStoreClient client) { + this.mClient = client; + } + + @Override + public HMSClient connect() throws IOException, InterruptedException, MetaException { + return new HMSClient(mClient); + } + + @Override + public void close() throws Exception { + } + } + + /** + * Create mock database with the given name + * @param name Database name + * @return Mock database object + */ + private static Database makeDb(String name) { + Database db = Mockito.mock(Database.class); + Mockito.when(db.getName()).thenReturn(name); + Mockito.when(db.getLocationUri()).thenReturn("hdfs:///" + name); + return db; + } + + /** + * Create mock table + * @param dbName db for this table + * @param tableName name of the table + * @return mock table object + */ + private static Table makeTable(String dbName, String tableName) { + Table table = Mockito.mock(Table.class); + Mockito.when(table.getDbName()).thenReturn(dbName); + Mockito.when(table.getTableName()).thenReturn(tableName); + StorageDescriptor sd = Mockito.mock(StorageDescriptor.class); + Mockito.when(sd.getLocation()).thenReturn( + String.format("hdfs:///%s/%s", dbName, tableName)); + Mockito.when(table.getSd()).thenReturn(sd); + return table; + } + + /** + * Create mock partition + * @param dbName database for this partition + * @param tableName table for this partition + * @param partName partition name + * @return mock partition object + */ + private static Partition makePartition(String dbName, String tableName, String partName) { + Partition partition = Mockito.mock(Partition.class); + StorageDescriptor sd = Mockito.mock(StorageDescriptor.class); + Mockito.when(sd.getLocation()).thenReturn( + String.format("hdfs:///%s/%s/%s", dbName, tableName, partName)); + Mockito.when(partition.getSd()).thenReturn(sd); + return partition; + } + + @Test + // Test basic 
operation with small database + public void testSimple() throws Exception { + HiveTable tab21 = new HiveTable("tab21"); + HiveTable tab31 = new HiveTable("tab31").add("part311").add("part312"); + HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31)); + HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21)); + HiveDb db1 = new HiveDb("db1"); + HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3); + MockClient c = new MockClient(snap); + + Map> update; + try(FullUpdateInitializer cacheInitializer = + new FullUpdateInitializer(new MockHMSClientFactory(c), conf)) { + update = cacheInitializer.getFullHMSSnapshot(); + } + Assert.assertEquals(5, update.size()); + Assert.assertEquals(Sets.newHashSet("db1"), update.get("db1")); + Assert.assertEquals(Sets.newHashSet("db2"), update.get("db2")); + Assert.assertEquals(Sets.newHashSet("db3"), update.get("db3")); + Assert.assertEquals(Sets.newHashSet("db2/tab21"), update.get("db2.tab21")); + Assert.assertEquals(Sets.newHashSet("db3/tab31", + "db3/tab31/part311", "db3/tab31/part312"), update.get("db3.tab31")); + } + + @Test + // Test that invalid paths are handled correctly + public void testInvalidPaths() throws Exception { + //Set up mocks: db1.tb1, with tb1 returning a wrong dbname (db2) + Database db1 = makeDb("db1"); + + Table tab1 = Mockito.mock(Table.class); + //Return a wrong db name, so that this triggers an exception + Mockito.when(tab1.getDbName()).thenReturn("db2"); + Mockito.when(tab1.getTableName()).thenReturn("tab1"); + + HiveMetaStoreClient client = Mockito.mock(HiveMetaStoreClient.class); + Mockito.when(client.getAllDatabases()).thenReturn(Lists.newArrayList("db1")); + Mockito.when(client.getDatabase("db1")).thenReturn(db1); + + Table tab12 = Mockito.mock(Table.class); + Mockito.when(tab12.getDbName()).thenReturn("db1"); + Mockito.when(tab12.getTableName()).thenReturn("tab21"); + StorageDescriptor sd21 = Mockito.mock(StorageDescriptor.class); + Mockito.when(sd21.getLocation()).thenReturn("hdfs:///db1/tab21"); + Mockito.when(tab12.getSd()).thenReturn(sd21); + + Mockito.when(client.getTableObjectsByName("db1", + Lists.newArrayList("tab1"))).thenReturn(Lists.newArrayList(tab1)); + Mockito.when(client.getTableObjectsByName("db1", + Lists.newArrayList("tab12"))).thenReturn(Lists.newArrayList(tab12)); + Mockito.when(client.getAllTables("db1")). 
+ thenReturn(Lists.newArrayList("tab1", "tab12")); + + + Map> update; + try(FullUpdateInitializer cacheInitializer = + new FullUpdateInitializer(new MockHMSClientFactory(client), conf)) { + update = cacheInitializer.getFullHMSSnapshot(); + } + Assert.assertEquals(2, update.size()); + Assert.assertEquals(Sets.newHashSet("db1"), update.get("db1")); + Assert.assertEquals(Sets.newHashSet("db1/tab21"), update.get("db1.tab21")); + } + + @Test + // Test handling of big tables and partitions + public void testBig() throws Exception { + int ndbs = 3; + int ntables = 51; + int nparts = 131; + + HiveSnapshot snap = new HiveSnapshot(); + + for (int i = 0; i < ndbs; i++) { + HiveDb db = new HiveDb("db" + i); + for (int j = 0; j < ntables; j++) { + HiveTable table = new HiveTable("table" + i + j); + for (int k = 0; k < nparts; k++) { + table.add("part" + i + j + k); + } + db.add(table); + } + snap.add(db); + } + MockClient c = new MockClient(snap); + Map> update; + try(FullUpdateInitializer cacheInitializer = + new FullUpdateInitializer(new MockHMSClientFactory(c), conf)) { + update = cacheInitializer.getFullHMSSnapshot(); + } + Assert.assertEquals((ntables * ndbs) + ndbs, update.size()); + for (int i = 0; i < ndbs; i++) { + String dbName = "db" + i; + Assert.assertEquals(Sets.newHashSet(dbName), update.get(dbName)); + + for (int j = 0; j < ntables; j++) { + String tableName = "table" + i + j; + Set values = new HashSet<>(); + values.add(String.format("%s/%s", dbName, tableName)); + for (int k = 0; k < nparts; k++) { + String partName = "part" + i + j + k; + values.add(String.format("%s/%s/%s", dbName, tableName, partName)); + } + String authz = dbName + "." + tableName; + Assert.assertEquals(values, update.get(authz)); + } + } + } + +} http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java new file mode 100644 index 0000000..c6be80d --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestFullUpdateModifier.java @@ -0,0 +1,482 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.sentry.service.thrift; + +import org.apache.hadoop.hive.metastore.api.NotificationEvent; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.hadoop.hive.metastore.messaging.MessageDeserializer; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAddPartitionMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAlterPartitionMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONAlterTableMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONCreateDatabaseMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONCreateTableMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropDatabaseMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropPartitionMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONDropTableMessage; +import org.apache.sentry.binding.metastore.messaging.json.SentryJSONMessageDeserializer; +import org.junit.Test; +import org.mockito.Mockito; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType.*; +import static org.junit.Assert.*; + +public class TestFullUpdateModifier { + private static final String SERVER = "s"; + private static final String PRINCIPAL = "p"; + private static final String DB = "Db1"; + private static final String TABLE = "Tab1"; + private static final String AUTH = DB.toLowerCase() + "." + TABLE.toLowerCase(); + private static final String PATH = "foo/bar"; + private static final String LOCATION = uri(PATH); + + private static final Table TABLE_OBJ = new Table(TABLE, DB, "", 0, 0, 0, + buildStorageDescriptor(LOCATION), null, null, "", "", ""); + + /** + * Convert path to HDFS URI + */ + private static final String uri(String path) { + return "hdfs:///" + path; + } + + /** + * Creates a StorageDescriptor using the location as parameter. + * + * @param location The location string for the StorageDescriptor + * @return A StorageDescriptor object + */ + private static StorageDescriptor buildStorageDescriptor(String location) { + return new StorageDescriptor(null, location, "", "", false, 0, null, null, null, null); + } + + /** + * Creates a Table object using the db name, table name and table location as parameters. + * + * @param dbName The database name string. + * @param tableName The table name string. + * @param location The table location string. + * @return A Table object + */ + private static Table buildTable(String dbName, String tableName, String location) { + return new Table(tableName, dbName, "", 0, 0, 0, + buildStorageDescriptor(location), null, null, "", "", ""); + } + + /** + * Test create database event. It should add database and its location. 
+ * As a result we should have entry {"db1": {foo/bar}} + * @throws Exception + */ + @Test + public void testCreateDatabase() throws Exception { + Map> update = new HashMap<>(); + NotificationEvent event = new NotificationEvent(0, 0, CREATE_DATABASE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONCreateDatabaseMessage message = + new SentryJSONCreateDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION); + Mockito.when(deserializer.getCreateDatabaseMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(); + expected.put(DB.toLowerCase(), Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test drop database event. It should drop database record. + * @throws Exception + */ + @Test + public void testDropDatabase() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropDatabaseMessage message = + new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION); + Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + assertTrue(update.isEmpty()); + } + + /** + * Test drop database event when dropped database location doesn't + * match original database location. Should leave update intact. + * @throws Exception + */ + @Test + public void testDropDatabaseWrongLocation() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + + NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropDatabaseMessage message = + new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, + "hdfs:///bad/location"); + Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + // DB should stay + Map> expected = new HashMap<>(); + expected.put(DB.toLowerCase(), Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test drop database which has tables/partitions. + * Should drop all related database records but leave unrelated records in place. + * @throws Exception + */ + @Test + public void testDropDatabaseWithTables() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + update.put(AUTH, Collections.singleton(PATH)); + update.put("unrelated", Collections.singleton(PATH)); + NotificationEvent event = new NotificationEvent(0, 0, DROP_DATABASE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropDatabaseMessage message = + new SentryJSONDropDatabaseMessage(SERVER, PRINCIPAL, DB, 0L, LOCATION); + Mockito.when(deserializer.getDropDatabaseMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(); + expected.put("unrelated", Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test create table event. It should add table and its location.
+ * As a result we should have entry {"db1.tab1": {foo/bar}} + * @throws Exception + */ + @Test + public void testCreateTable() throws Exception { + Map> update = new HashMap<>(); + NotificationEvent event = new NotificationEvent(0, 0, CREATE_TABLE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONCreateTableMessage message = + new SentryJSONCreateTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, Collections.emptyIterator(), 0L); + Mockito.when(deserializer.getCreateTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(); + expected.put(AUTH, Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test drop table event. It should drop table record. + * @throws Exception + */ + @Test + public void testDropTable() throws Exception { + Map> update = new HashMap<>(); + update.put(AUTH, Collections.singleton(PATH)); + NotificationEvent event = new NotificationEvent(0, 0, DROP_TABLE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropTableMessage message = + new SentryJSONDropTableMessage(SERVER, PRINCIPAL, DB, TABLE, 0L, LOCATION); + Mockito.when(deserializer.getDropTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + assertTrue(update.isEmpty()); + } + + /** + * Test drop table event. It should drop table record. + * @throws Exception + */ + @Test + public void testDropTableWrongLocation() throws Exception { + Map> update = new HashMap<>(); + update.put(AUTH, Collections.singleton(PATH)); + NotificationEvent event = new NotificationEvent(0, 0, DROP_TABLE.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropTableMessage message = + new SentryJSONDropTableMessage(SERVER, PRINCIPAL, DB, TABLE, 0L, + "hdfs:///bad/location"); + Mockito.when(deserializer.getDropTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + // DB should stay + assertEquals(Collections.singleton(PATH), update.get(AUTH)); + assertEquals(1, update.size()); + } + + /** + * Test add partition event. It should add table and its location. + * As a result we should have entry {"db1.tab1": {foo/bar, hello/world}} + * @throws Exception + */ + @Test + public void testAddPartition() throws Exception { + Map> update = new HashMap<>(); + Set locations = new HashSet<>(); + locations.add(PATH); + update.put(AUTH, locations); + + NotificationEvent event = new NotificationEvent(0, 0, ADD_PARTITION.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + String partPath = "hello/world"; + String partLocation = uri(partPath); + + SentryJSONAddPartitionMessage message = + new SentryJSONAddPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ, + Collections.emptyIterator(), Collections.emptyIterator(), + 0L, Collections.singletonList(partLocation)); + Mockito.when(deserializer.getAddPartitionMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Set expected = new HashSet<>(2); + expected.add(PATH); + expected.add(partPath); + assertEquals(expected, update.get(AUTH)); + } + + /** + * Test drop partition event. It should drop partition info from the list of locations. 
+ * @throws Exception + */ + @Test + public void testDropPartitions() throws Exception { + String partPath = "hello/world"; + String partLocation = uri(partPath); + Map> update = new HashMap<>(); + Set locations = new HashSet<>(); + locations.add(PATH); + locations.add(partPath); + update.put(AUTH, locations); + + NotificationEvent event = new NotificationEvent(0, 0, DROP_PARTITION.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONDropPartitionMessage message = + new SentryJSONDropPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ, + Collections.>emptyList(), 0L, Collections.singletonList(partLocation)); + Mockito.when(deserializer.getDropPartitionMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + assertEquals(Collections.singleton(PATH), update.get(AUTH)); + } + + /** + * Test alter partition event. It should change partition location + * @throws Exception + */ + @Test + public void testAlterPartition() throws Exception { + String partPath = "hello/world"; + String partLocation = uri(partPath); + + String newPath = "better/world"; + String newLocation = uri(newPath); + + Map> update = new HashMap<>(); + Set locations = new HashSet<>(); + locations.add(PATH); + locations.add(partPath); + update.put(AUTH, locations); + + NotificationEvent event = new NotificationEvent(0, 0, ALTER_PARTITION.toString(), ""); + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + Partition partitionObjBefore = new Partition(null, DB, TABLE, 0, 0, buildStorageDescriptor(partLocation), null); + Partition partitionObjAfter = new Partition(null, DB, TABLE, 0, 0, buildStorageDescriptor(newLocation), null); + + SentryJSONAlterPartitionMessage message = + new SentryJSONAlterPartitionMessage(SERVER, PRINCIPAL, TABLE_OBJ, + partitionObjBefore, partitionObjAfter, 0L); + + Mockito.when(deserializer.getAlterPartitionMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + + Set expected = new HashSet<>(2); + expected.add(PATH); + expected.add(newPath); + assertEquals(expected, update.get(AUTH)); + } + + /** + * Test alter table event that changes database name when there are no tables. + * @throws Exception + */ + @Test + public void testAlterTableChangeDbNameNoTables() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + String newDbName = "Db2"; + + NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), ""); + event.setDbName(newDbName); + event.setTableName(TABLE); + + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONAlterTableMessage message = + new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L); + + Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + assertEquals(Collections.singleton(PATH), update.get(newDbName.toLowerCase())); + assertFalse(update.containsKey(DB.toLowerCase())); + } + + @Test + /** + * Test alter table event that changes database name when there are tables. + * All entries like "dbName.tableName" should have dbName changed to the new name. 
+ * @throws Exception + */ + public void testAlterTableChangeDbNameWithTables() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + Set locations = new HashSet<>(1); + locations.add(PATH); + update.put(AUTH, locations); + + String newDbName = "Db2"; + String newAuth = newDbName.toLowerCase() + "." + TABLE.toLowerCase(); + + NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), ""); + event.setDbName(newDbName); + event.setTableName(TABLE); + + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONAlterTableMessage message = + new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L); + + Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(2); + expected.put(newDbName.toLowerCase(), Collections.singleton(PATH)); + expected.put(newAuth, Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test alter table event that changes table name. + * @throws Exception + */ + @Test + public void testAlterTableChangeTableName() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + Set locations = new HashSet<>(1); + locations.add(PATH); + update.put(AUTH, locations); + + String newTableName = "Table2"; + String newAuth = DB.toLowerCase() + "." + newTableName.toLowerCase(); + + NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), ""); + event.setDbName(DB); + event.setTableName(newTableName); + + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + SentryJSONAlterTableMessage message = + new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, TABLE_OBJ, 0L); + + Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(2); + expected.put(DB.toLowerCase(), Collections.singleton(PATH)); + expected.put(newAuth, Collections.singleton(PATH)); + assertEquals(expected, update); + } + + /** + * Test alter table event that changes object location. + * @throws Exception + */ + @Test + public void testAlterTableChangeLocation() throws Exception { + Map> update = new HashMap<>(); + update.put(DB.toLowerCase(), Collections.singleton(PATH)); + Set locations = new HashSet<>(1); + locations.add(PATH); + update.put(AUTH, locations); + + NotificationEvent event = new NotificationEvent(0, 0, ALTER_TABLE.toString(), ""); + event.setDbName(DB); + event.setTableName(TABLE); + + String newPath = "hello/world"; + String newLocation = uri(newPath); + + MessageDeserializer deserializer = Mockito.mock(SentryJSONMessageDeserializer.class); + + Table tableWithNewLocation = buildTable(DB, TABLE, newLocation); + SentryJSONAlterTableMessage message = + new SentryJSONAlterTableMessage(SERVER, PRINCIPAL, TABLE_OBJ, tableWithNewLocation, 0L); + + Mockito.when(deserializer.getAlterTableMessage("")).thenReturn(message); + FullUpdateModifier.applyEvent(update, event, deserializer); + Map> expected = new HashMap<>(2); + expected.put(DB.toLowerCase(), Collections.singleton(PATH)); + expected.put(AUTH.toLowerCase(), Collections.singleton(newPath)); + assertEquals(expected, update); + } + + /** + * Test renamePrefixKeys function. + * We ask to rename "foo.bar" key to "baz.bar" key. + * @throws Exception + */ + @Test + public void testRenamePrefixKeys() throws Exception { + String oldKey = "foo."; + String newKey = "baz."; + String postfix = "bar"; + Map> update = new HashMap<>(); + update.put(oldKey + postfix , Collections.emptySet()); + FullUpdateModifier.renamePrefixKeys(update, oldKey, newKey); + assertEquals(1, update.size()); + assertTrue(update.containsKey(newKey + postfix)); + } + + /** + * Test renamePostfixKeys and renamePrefixKeys functions when the destination keys exist. + * Should not change anything. + * We ask to rename "foo.bar" key to "baz.bar" key. + * @throws Exception + */ + @Test + public void testRenameKeysWithConflicts() throws Exception { + Map> update = new HashMap<>(); + update.put("foo.bar", Collections.emptySet()); + update.put("baz.bar", Collections.emptySet()); + Map> expected = new HashMap<>(update); + + FullUpdateModifier.renamePrefixKeys(update, "foo.", "baz."); + assertEquals(update, expected); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java new file mode 100644 index 0000000..83a1bec --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestHiveNotificationFetcher.java @@ -0,0 +1,163 @@ +/* + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 +
+ Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + */ + +package org.apache.sentry.service.thrift; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.IMetaStoreClient.NotificationFilter; +import org.apache.hadoop.hive.metastore.api.NotificationEvent; +import org.apache.hadoop.hive.metastore.api.NotificationEventResponse; +import org.apache.sentry.hdfs.UniquePathsUpdate; +import org.apache.sentry.provider.db.service.persistent.SentryStore; +import org.junit.Test; +import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +public class TestHiveNotificationFetcher { + @Test + public void testGetEmptyNotificationsWhenHmsReturnsANullResponse() throws Exception { + SentryStore store = Mockito.mock(SentryStore.class); + HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class); + HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class); + + Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient)); + + try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) { + List events; + + Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null)) + .thenReturn(null); + + events = fetcher.fetchNotifications(0); + assertTrue(events.isEmpty()); + } + } + + @Test + public void testGetEmptyNotificationsWhenHmsReturnsEmptyEvents() throws Exception { + SentryStore store = Mockito.mock(SentryStore.class); + HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class); + HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class); + + Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient)); + + try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) { + List events; + + Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null)) + .thenReturn(new NotificationEventResponse(Collections.emptyList())); + + events = fetcher.fetchNotifications(0); + assertTrue(events.isEmpty()); + } + } + + @Test + public void testGetAllNotificationsReturnedByHms() throws Exception { + SentryStore store = Mockito.mock(SentryStore.class); + HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class); + HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class); + + Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient)); + + try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) { + List events; + + Mockito.when(hmsClient.getNextNotification(0, Integer.MAX_VALUE, null)) + .thenReturn(new NotificationEventResponse( + Arrays.asList( + new NotificationEvent(1L, 0, "CREATE_DATABASE", ""), + new NotificationEvent(2L, 0, "CREATE_TABLE", "") + ) + )); + + events = fetcher.fetchNotifications(0); + assertEquals(2, events.size()); + assertEquals(1, events.get(0).getEventId()); + assertEquals("CREATE_DATABASE", events.get(0).getEventType()); + assertEquals(2, events.get(1).getEventId()); + assertEquals("CREATE_TABLE", 
events.get(1).getEventType()); + } + } + + @Test + public void testGetDuplicatedEventsAndFilterEventsAlreadySeen() throws Exception { + final SentryStore store = Mockito.mock(SentryStore.class); + HiveConnectionFactory hmsConnection = Mockito.mock(HiveConnectionFactory.class); + HiveMetaStoreClient hmsClient = Mockito.mock(HiveMetaStoreClient.class); + + Mockito.when(hmsConnection.connect()).thenReturn(new HMSClient(hmsClient)); + + try (HiveNotificationFetcher fetcher = new HiveNotificationFetcher(store, hmsConnection)) { + List events; + + /* + * Requesting an ID > 0 will request all notifications from 0 again but filter those + * already seen notifications with ID = 1 + */ + + // This mock will also test that the NotificationFilter works as expected + Mockito.when(hmsClient.getNextNotification(Mockito.eq(0L), Mockito.eq(Integer.MAX_VALUE), + (NotificationFilter) Mockito.notNull())).thenAnswer(new Answer() { + @Override + public NotificationEventResponse answer(InvocationOnMock invocation) + throws Throwable { + NotificationFilter filter = (NotificationFilter) invocation.getArguments()[2]; + NotificationEventResponse response = new NotificationEventResponse(); + + List events = Arrays.asList( + new NotificationEvent(1L, 0, "CREATE_DATABASE", ""), + new NotificationEvent(1L, 0, "CREATE_TABLE", ""), + new NotificationEvent(2L, 0, "ALTER_TABLE", "") + ); + + for (NotificationEvent event : events) { + String hash = UniquePathsUpdate.sha1(event); + + // We simulate that CREATE_DATABASE is already processed + if (event.getEventType().equals("CREATE_DATABASE")) { + Mockito.when(store.isNotificationProcessed(Mockito.eq(hash))).thenReturn(true); + } else { + Mockito.when(store.isNotificationProcessed(Mockito.eq(hash))).thenReturn(false); + } + + if (filter.accept(event)) { + response.addToEvents(event); + } + } + + return response; + } + }); + + events = fetcher.fetchNotifications(1); + assertEquals(2, events.size()); + assertEquals(1, events.get(0).getEventId()); + assertEquals("CREATE_TABLE", events.get(0).getEventType()); + assertEquals(2, events.get(1).getEventId()); + assertEquals("ALTER_TABLE", events.get(1).getEventType()); + } + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java new file mode 100644 index 0000000..38668ca --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryHMSClient.java @@ -0,0 +1,344 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sentry.service.thrift; + +import com.google.common.collect.Lists; +import com.google.common.collect.Sets; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId; +import org.apache.hadoop.hive.metastore.api.Database; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.StorageDescriptor; +import org.apache.hadoop.hive.metastore.api.Table; +import org.apache.sentry.provider.db.service.persistent.PathsImage; +import org.apache.thrift.TException; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import javax.security.auth.login.LoginException; + +/** + * Test mocks HiveMetaStoreClient class and tests SentryHMSClient. + */ +public class TestSentryHMSClient { + + private static final Configuration conf = new Configuration(); + private static SentryHMSClient client; + private static MockHMSClientFactory hiveConnectionFactory; + + /** + * Create mock database with the given name + * + * @param name Database name + * @return Mock database object + */ + private static Database makeDb(String name) { + Database db = Mockito.mock(Database.class); + Mockito.when(db.getName()).thenReturn(name); + Mockito.when(db.getLocationUri()).thenReturn("hdfs:///" + name); + return db; + } + + /** + * Create mock table + * + * @param dbName db for this table + * @param tableName name of the table + * @return mock table object + */ + private static Table makeTable(String dbName, String tableName) { + Table table = Mockito.mock(Table.class); + Mockito.when(table.getDbName()).thenReturn(dbName); + Mockito.when(table.getTableName()).thenReturn(tableName); + StorageDescriptor sd = Mockito.mock(StorageDescriptor.class); + Mockito.when(sd.getLocation()).thenReturn( + String.format("hdfs:///%s/%s", dbName, tableName)); + Mockito.when(table.getSd()).thenReturn(sd); + return table; + } + + /** + * Create mock partition + * + * @param dbName database for this partition + * @param tableName table for this partition + * @param partName partition name + * @return mock partition object + */ + private static Partition makePartition(String dbName, String tableName, String partName) { + Partition partition = Mockito.mock(Partition.class); + StorageDescriptor sd = Mockito.mock(StorageDescriptor.class); + Mockito.when(sd.getLocation()).thenReturn( + String.format("hdfs:///%s/%s/%s", dbName, tableName, partName)); + Mockito.when(partition.getSd()).thenReturn(sd); + return partition; + } + + @BeforeClass + static public void initialize() throws IOException, LoginException { + hiveConnectionFactory = new MockHMSClientFactory(); + client = new SentryHMSClient(conf, (HiveConnectionFactory)hiveConnectionFactory); + } + + /** + * Creating snapshot when SentryHMSClient is not connected to HMS + */ + @Test + public void testSnapshotCreationWithOutClientConnected() throws Exception { + // Make sure that client is not connected + Assert.assertFalse(client.isConnected()); + PathsImage snapshotInfo = client.getFullSnapshot(); + 
Assert.assertTrue(snapshotInfo.getPathImage().isEmpty()); + } + + /** + * Creating snapshot when HMS doesn't have any data + */ + @Test + public void testSnapshotCreationWithNoHmsData() throws Exception { + MockClient mockClient = new MockClient(new HiveSnapshot(), 1); + client.setClient(mockClient.client); + // Make sure that client is connected + Assert.assertTrue(client.isConnected()); + PathsImage snapshotInfo = client.getFullSnapshot(); + Assert.assertTrue(snapshotInfo.getPathImage().isEmpty()); + } + + /** + * Creating a snapshot when there is data but there are updates to HMS data mean while + */ + @Test + public void testSnapshotCreationWhenDataIsActivelyUpdated() throws Exception { + HiveTable tab21 = new HiveTable("tab21"); + HiveTable tab31 = new HiveTable("tab31").add("part311").add("part312"); + HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31)); + HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21)); + HiveDb db1 = new HiveDb("db1"); + HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3); + final MockClient mockClient = new MockClient(snap, 1); + + client.setClient(mockClient.client); + hiveConnectionFactory.setClient(mockClient); + // Make sure that client is connected + Assert.assertTrue(client.isConnected()); + PathsImage snapshotInfo = client.getFullSnapshot(); + // Make sure that snapshot is not empty + Assert.assertTrue(!snapshotInfo.getPathImage().isEmpty()); + + Mockito.when(mockClient.client.getCurrentNotificationEventId()). + thenAnswer(new Answer() { + @Override + public CurrentNotificationEventId answer(InvocationOnMock invocation) + throws Throwable { + return new CurrentNotificationEventId(mockClient.incrementNotificationEventId()); + } + + }); + + snapshotInfo = client.getFullSnapshot(); + Assert.assertTrue(snapshotInfo.getPathImage().isEmpty()); + } + + /** + * Creating a snapshot when there is data in HMS. + */ + @Test + public void testSnapshotCreationSuccess() throws Exception { + HiveTable tab21 = new HiveTable("tab21"); + HiveTable tab31 = new HiveTable("tab31"); + HiveDb db3 = new HiveDb("db3", Lists.newArrayList(tab31)); + HiveDb db2 = new HiveDb("db2", Lists.newArrayList(tab21)); + HiveDb db1 = new HiveDb("db1"); + HiveSnapshot snap = new HiveSnapshot().add(db1).add(db2).add(db3); + MockClient mockClient = new MockClient(snap, 1); + Mockito.when(mockClient.client.getCurrentNotificationEventId()). + thenReturn(new CurrentNotificationEventId(mockClient.eventId)); + client.setClient(mockClient.client); + hiveConnectionFactory.setClient(mockClient); + // Make sure that client is connected + Assert.assertTrue(client.isConnected()); + + PathsImage snapshotInfo = client.getFullSnapshot(); + Assert.assertEquals(5, snapshotInfo.getPathImage().size()); + Assert.assertEquals(Sets.newHashSet("db1"), snapshotInfo.getPathImage().get("db1")); + Assert.assertEquals(Sets.newHashSet("db2"), snapshotInfo.getPathImage().get("db2")); + Assert.assertEquals(Sets.newHashSet("db3"), snapshotInfo.getPathImage().get("db3")); + Assert.assertEquals(Sets.newHashSet("db2/tab21"), + snapshotInfo.getPathImage().get("db2.tab21")); + Assert.assertEquals(Sets.newHashSet("db3/tab31"), snapshotInfo.getPathImage().get("db3.tab31")); + Assert.assertEquals(snapshotInfo.getId(), mockClient.eventId); + + } + + /** + * Representation of a Hive table. A table has a name and a list of partitions. 
+ */ + private static class HiveTable { + + private final String name; + private final List partitions; + + HiveTable(String name) { + this.name = name; + this.partitions = new ArrayList<>(); + } + + HiveTable add(String partition) { + partitions.add(partition); + return this; + } + } + + /** + * Representation of a Hive database. A database has a name and a list of tables + */ + private static class HiveDb { + + final String name; + Collection tables; + + @SuppressWarnings("SameParameterValue") + HiveDb(String name) { + this.name = name; + tables = new ArrayList<>(); + } + + HiveDb(String name, Collection tables) { + this.name = name; + this.tables = tables; + if (this.tables == null) { + this.tables = new ArrayList<>(); + } + } + + void add(HiveTable table) { + this.tables.add(table); + } + } + + /** + * Representation of a full Hive snapshot. A snapshot is collection of databases + */ + private static class HiveSnapshot { + + final List databases = new ArrayList<>(); + + HiveSnapshot() { + } + + HiveSnapshot(Collection dblist) { + if (dblist != null) { + databases.addAll(dblist); + } + } + + HiveSnapshot add(HiveDb db) { + this.databases.add(db); + return this; + } + } + + /** + * Mock for HMSClientFactory + */ + private static class MockHMSClientFactory implements HiveConnectionFactory { + + private HiveMetaStoreClient mClient; + + public MockHMSClientFactory() { + mClient = null; + } + + void setClient(MockClient mockClient) { + this.mClient = mockClient.client; + } + @Override + public HMSClient connect() throws IOException, InterruptedException, MetaException { + return new HMSClient(mClient); + } + + @Override + public void close() throws Exception { + } + } + + /** + * Convert Hive snapshot to mock client that will return proper values + * for the snapshot. + */ + private static class MockClient { + + public HiveMetaStoreClient client; + public long eventId; + + MockClient(HiveSnapshot snapshot, long eventId) throws TException { + this.eventId = eventId; + client = Mockito.mock(HiveMetaStoreClient.class); + List dbNames = new ArrayList<>(snapshot.databases.size()); + // Walk over all databases and mock appropriate objects + for (HiveDb mdb : snapshot.databases) { + String dbName = mdb.name; + dbNames.add(dbName); + Database db = makeDb(dbName); + Mockito.when(client.getDatabase(dbName)).thenReturn(db); + List tableNames = new ArrayList<>(mdb.tables.size()); + // Walk over all tables for the database and mock appropriate objects + for (HiveTable table : mdb.tables) { + String tableName = table.name; + tableNames.add(tableName); + Table mockTable = makeTable(dbName, tableName); + Mockito.when(client.getTableObjectsByName(dbName, + Lists.newArrayList(tableName))) + .thenReturn(Lists.newArrayList(mockTable)); + Mockito.when(client.listPartitionNames(dbName, tableName, (short) -1)) + .thenReturn(table.partitions); + // Walk across all partitions and mock appropriate objects + for (String partName : table.partitions) { + Partition p = makePartition(dbName, tableName, partName); + Mockito.when(client.getPartitionsByNames(dbName, tableName, + Lists.newArrayList(partName))) + .thenReturn(Lists.newArrayList(p)); + } + } + Mockito.when(client.getAllTables(dbName)).thenReturn(tableNames); + } + // Return all database names + Mockito.when(client.getAllDatabases()).thenReturn(dbNames); + Mockito.when(client.getCurrentNotificationEventId()). 
+ thenReturn(new CurrentNotificationEventId(eventId)); + + } + + public Long incrementNotificationEventId() { + eventId = eventId + 1; + return eventId; + } + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java ---------------------------------------------------------------------- diff --git a/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java new file mode 100644 index 0000000..4f71e1c --- /dev/null +++ b/sentry-service/sentry-service-server/src/test/java/org/apache/sentry/service/thrift/TestSentryStateBank.java @@ -0,0 +1,84 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one or more contributor license + * agreements. See the NOTICE file distributed with this work for additional information regarding + * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with the License. You may obtain + * a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License + * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express + * or implied. See the License for the specific language governing permissions and limitations under + * the License. + */ +package org.apache.sentry.service.thrift; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import java.util.Arrays; +import java.util.HashSet; +import org.junit.Before; +import org.junit.Test; + +/** + * + */ +public class TestSentryStateBank { + + @Before + public void setUp() { + SentryStateBank.clearAllStates(); + } + + @Test + public void testEnableState() { + SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE); + assertTrue("Expected FIRST_STATE to be enabled", + SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE)); + assertFalse("Expected SECOND_STATE to be disabled", + SentryStateBank.isEnabled(TestState.COMPONENT, TestState.SECOND_STATE)); + } + + @Test + public void testStatesGetDisabled() { + SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE); + assertTrue("Expected FIRST_STATE to be enabled", + SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE)); + SentryStateBank.disableState(TestState.COMPONENT, TestState.FIRST_STATE); + assertFalse("Expected FIRST_STATE to be disabled", + SentryStateBank.isEnabled(TestState.COMPONENT, TestState.FIRST_STATE)); + } + + @Test + public void testCheckMultipleStateCheckSuccess() { + SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE); + SentryStateBank.enableState(TestState.COMPONENT, TestState.SECOND_STATE); + + assertTrue("Expected both FIRST_STATE and SECOND_STATE to be enabled", + SentryStateBank.hasStatesEnabled(TestState.COMPONENT, new HashSet( + Arrays.asList(TestState.FIRST_STATE, TestState.SECOND_STATE)))); + } + + @Test + public void testCheckMultipleStateCheckFailure() { + SentryStateBank.enableState(TestState.COMPONENT, TestState.FIRST_STATE); + assertFalse("Expected only FIRST_STATE to be enabled", + SentryStateBank.hasStatesEnabled(TestState.COMPONENT, new HashSet( + 

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/resources/cacerts.jks
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-server/src/test/resources/cacerts.jks b/sentry-service/sentry-service-server/src/test/resources/cacerts.jks
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/resources/keystore.jks
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-server/src/test/resources/keystore.jks b/sentry-service/sentry-service-server/src/test/resources/keystore.jks
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-service/sentry-service-server/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/sentry-service/sentry-service-server/src/test/resources/log4j.properties b/sentry-service/sentry-service-server/src/test/resources/log4j.properties
new file mode 100644
index 0000000..9766758
--- /dev/null
+++ b/sentry-service/sentry-service-server/src/test/resources/log4j.properties
@@ -0,0 +1,34 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Define some default values that can be overridden by system properties.
+#
+# For testing, it may also be convenient to specify
+
+log4j.rootLogger=DEBUG,console
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d (%t) [%p - %l] %m%n
+
+log4j.logger.org.apache.hadoop.conf.Configuration=INFO
+log4j.logger.org.apache.hadoop.metrics2=INFO
+log4j.logger.org.apache.directory=INFO
+log4j.logger.org.apache.directory.api.ldap.model.entry.AbstractValue=WARN
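The log4j.properties above keeps the root logger at DEBUG on the console while pinning a few chatty loggers to INFO or WARN. When test logging does not look as expected, a throwaway check along these lines can print the effective levels; this is a hypothetical helper, not part of the commit, and relies only on the log4j 1.x API already on the test classpath:

    import org.apache.log4j.Logger;

    public class LogLevelCheck {
      public static void main(String[] args) {
        // Effective levels reflect the log4j.properties picked up from the classpath
        System.out.println("root: " + Logger.getRootLogger().getEffectiveLevel());
        System.out.println("org.apache.hadoop.conf.Configuration: "
            + Logger.getLogger("org.apache.hadoop.conf.Configuration").getEffectiveLevel());
        System.out.println("org.apache.directory: "
            + Logger.getLogger("org.apache.directory").getEffectiveLevel());
      }
    }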

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-tests/sentry-tests-hive/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/pom.xml b/sentry-tests/sentry-tests-hive/pom.xml
index 6816249..c753acf 100644
--- a/sentry-tests/sentry-tests-hive/pom.xml
+++ b/sentry-tests/sentry-tests-hive/pom.xml
@@ -263,7 +263,8 @@ limitations under the License.
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
+      <artifactId>sentry-service-server</artifactId>
+      <version>${project.version}</version>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-tests/sentry-tests-kafka/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-kafka/pom.xml b/sentry-tests/sentry-tests-kafka/pom.xml
index e61f64d..03bc574 100644
--- a/sentry-tests/sentry-tests-kafka/pom.xml
+++ b/sentry-tests/sentry-tests-kafka/pom.xml
@@ -58,7 +58,8 @@ limitations under the License.
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
+      <artifactId>sentry-service-server</artifactId>
+      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.curator</groupId>

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-tests/sentry-tests-solr/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-solr/pom.xml b/sentry-tests/sentry-tests-solr/pom.xml
index d9540a6..db33ee9 100644
--- a/sentry-tests/sentry-tests-solr/pom.xml
+++ b/sentry-tests/sentry-tests-solr/pom.xml
@@ -6,9 +6,7 @@ this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
    the License.  You may obtain a copy of the License at
-
       http://www.apache.org/licenses/LICENSE-2.0
-
    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -16,7 +14,7 @@ See the License for the specific language governing permissions and
    limitations under the License.
 -->
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
   <modelVersion>4.0.0</modelVersion>
@@ -99,6 +97,53 @@ limitations under the License.
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-server</artifactId>
+      <version>${project.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.sentry</groupId>
+          <artifactId>sentry-core-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.eclipse.jetty.aggregate</groupId>
+          <artifactId>jetty-all</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-beeline</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.dropwizard.metrics</groupId>
+          <artifactId>metrics-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.dropwizard.metrics</groupId>
+          <artifactId>metrics-jvm</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>io.dropwizard.metrics</groupId>
+          <artifactId>metrics-servlets</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-metastore</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hive</groupId>
+          <artifactId>hive-common</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.hive</groupId>
       <artifactId>hive-metastore</artifactId>
@@ -284,4 +329,4 @@ limitations under the License.
-</project>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-tests/sentry-tests-sqoop/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-sqoop/pom.xml b/sentry-tests/sentry-tests-sqoop/pom.xml
index eed8269..e280c9e 100644
--- a/sentry-tests/sentry-tests-sqoop/pom.xml
+++ b/sentry-tests/sentry-tests-sqoop/pom.xml
@@ -81,7 +81,8 @@ limitations under the License.
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
+      <artifactId>sentry-service-server</artifactId>
+      <version>${project.version}</version>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/sentry/blob/b97f5c7a/sentry-tools/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-tools/pom.xml b/sentry-tools/pom.xml
index b882c6f..05cd75c 100644
--- a/sentry-tools/pom.xml
+++ b/sentry-tools/pom.xml
@@ -40,7 +40,12 @@ limitations under the License.
       <groupId>org.apache.sentry</groupId>
-      <artifactId>sentry-provider-db</artifactId>
+      <artifactId>sentry-service-server</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.sentry</groupId>
+      <artifactId>sentry-service-server</artifactId>
       <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>