atlas-commits mailing list archives

From venkat...@apache.org
Subject [16/50] [abbrv] incubator-atlas git commit: IDE java code reformat
Date Fri, 12 Jun 2015 20:38:36 GMT
IDE java code reformat


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/fef50cee
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/fef50cee
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/fef50cee

Branch: refs/heads/apache-local
Commit: fef50cee33d3cb8becb79de754950ad363c6adc4
Parents: 2262478
Author: Shwetha GS <sshivalingamurthy@hortonworks.com>
Authored: Fri Jun 12 13:58:50 2015 +0530
Committer: Shwetha GS <sshivalingamurthy@hortonworks.com>
Committed: Fri Jun 12 13:58:50 2015 +0530

----------------------------------------------------------------------
 .../org/apache/atlas/falcon/FalconImporter.java |  21 +-
 .../apache/atlas/falcon/FalconTypeSystem.java   |  72 +-
 .../apache/atlas/falcon/FalconImporterTest.java |   7 +-
 .../atlas/falcon/FalconTypeSystemTest.java      |   6 +-
 .../atlas/hive/bridge/HiveMetaStoreBridge.java  |  43 +-
 .../org/apache/atlas/hive/hook/HiveHook.java    |  44 +-
 .../hive/model/HiveDataModelGenerator.java      | 358 ++++------
 .../atlas/hive/hook/BaseSSLAndKerberosTest.java |  18 +-
 .../org/apache/atlas/hive/hook/HiveHookIT.java  |  33 +-
 .../hook/NegativeSSLAndKerberosHiveHookIT.java  |   3 +-
 .../hive/hook/SSLAndKerberosHiveHookIT.java     |   8 +-
 .../apache/atlas/hive/hook/SSLHiveHookIT.java   |  26 +-
 .../apache/hadoop/metadata/bridge/ABridge.java  |  34 +-
 .../hadoop/metadata/bridge/BridgeManager.java   |  30 +-
 .../metadata/bridge/BridgeTypeBootstrapper.java |  33 +-
 .../bridge/hivestructure/HiveMetaImporter.java  |  18 +-
 .../hivestructure/HiveStructureBridge.java      |  91 ++-
 .../metadata/bridge/module/BridgeModule.java    |  18 +-
 .../metadata/bridge/BridgeManagerTest.java      |   3 +-
 .../hivelineage/TestHiveLineageBridge.java      |   3 +-
 .../main/java/org/apache/atlas/AtlasClient.java |  28 +-
 .../atlas/security/SecureClientUtils.java       |  35 +-
 .../apache/atlas/security/BaseSecurityTest.java |  40 +-
 pom.xml                                         |  45 +-
 .../java/org/apache/atlas/GraphTransaction.java |   6 +-
 .../atlas/GraphTransactionInterceptor.java      |   2 +-
 .../apache/atlas/RepositoryMetadataModule.java  |  10 +-
 .../atlas/discovery/HiveLineageService.java     |  67 +-
 .../apache/atlas/discovery/SearchIndexer.java   |   1 -
 .../graph/DefaultGraphPersistenceStrategy.java  | 124 ++--
 .../graph/GraphBackedDiscoveryService.java      |  17 +-
 .../org/apache/atlas/repository/Constants.java  |   2 +-
 .../repository/EntityNotFoundException.java     |   2 +-
 .../repository/IndexCreationException.java      |   2 +-
 .../apache/atlas/repository/IndexException.java |   3 +-
 .../atlas/repository/MetadataRepository.java    |   7 +-
 .../atlas/repository/RepositoryException.java   |   3 +-
 .../graph/GraphBackedMetadataRepository.java    | 679 ++++++++-----------
 .../graph/GraphBackedSearchIndexer.java         | 185 +++--
 .../atlas/repository/graph/GraphHelper.java     |  44 +-
 .../repository/memory/AttributeStores.java      | 146 ++--
 .../atlas/repository/memory/ClassStore.java     |   6 +-
 .../memory/HierarchicalTypeStore.java           |  11 +-
 .../repository/memory/IAttributeStore.java      |   3 +-
 .../atlas/repository/memory/MemRepository.java  |  26 +-
 .../memory/ReplaceIdWithInstance.java           |  20 +-
 .../atlas/repository/memory/StructStore.java    |   3 +-
 .../atlas/repository/memory/TraitStore.java     |   3 +-
 .../typestore/GraphBackedTypeStore.java         |  98 +--
 .../atlas/services/DefaultMetadataService.java  |  83 +--
 .../apache/atlas/services/MetadataService.java  |   7 +-
 .../test/java/org/apache/atlas/TestUtils.java   |  57 +-
 .../GraphBackedDiscoveryServiceTest.java        | 108 ++-
 .../atlas/discovery/HiveLineageServiceTest.java | 415 +++++-------
 .../org/apache/atlas/repository/BaseTest.java   | 109 ++-
 .../GraphBackedMetadataRepositoryTest.java      | 174 ++---
 .../graph/GraphRepoMapperScaleTest.java         |  96 +--
 .../atlas/repository/memory/EnumTest.java       |  88 +--
 .../repository/memory/InstanceE2ETest.java      |  48 +-
 .../atlas/repository/memory/StructTest.java     |   3 +-
 .../atlas/repository/memory/TraitTest.java      |  20 +-
 .../typestore/GraphBackedTypeStoreTest.java     |   4 +-
 .../org/apache/atlas/TypeNotFoundException.java |   2 +-
 .../apache/atlas/typesystem/Referenceable.java  |   5 +-
 .../persistence/DownCastStructInstance.java     |   5 +-
 .../apache/atlas/typesystem/persistence/Id.java |  23 +-
 .../atlas/typesystem/persistence/MapIds.java    |   3 +-
 .../persistence/ReferenceableInstance.java      |  20 +-
 .../typesystem/persistence/StructInstance.java  | 187 +++--
 .../typesystem/types/AttributeDefinition.java   |  47 +-
 .../atlas/typesystem/types/AttributeInfo.java   |   5 +-
 .../atlas/typesystem/types/ClassType.java       |  48 +-
 .../atlas/typesystem/types/DataTypes.java       |  40 +-
 .../typesystem/types/DownCastFieldMapping.java  |   3 +-
 .../apache/atlas/typesystem/types/EnumType.java |  10 +-
 .../typesystem/types/EnumTypeDefinition.java    |  16 +-
 .../atlas/typesystem/types/EnumValue.java       |  16 +-
 .../atlas/typesystem/types/FieldMapping.java    |  15 +-
 .../typesystem/types/HierarchicalType.java      |  94 +--
 .../types/HierarchicalTypeDefinition.java       |  35 +-
 .../atlas/typesystem/types/Multiplicity.java    |  21 +-
 .../typesystem/types/ObjectGraphTraversal.java  |  11 +-
 .../typesystem/types/ObjectGraphWalker.java     |  17 +-
 .../atlas/typesystem/types/StructType.java      |  39 +-
 .../typesystem/types/StructTypeDefinition.java  |  16 +-
 .../atlas/typesystem/types/TraitType.java       |   6 +-
 .../atlas/typesystem/types/TypeSystem.java      | 213 +++---
 .../atlas/typesystem/types/TypeUtils.java       |  14 +-
 .../typesystem/types/TypedStructHandler.java    |  16 +-
 .../types/ValueConversionException.java         |  11 +-
 .../atlas/typesystem/types/utils/TypesUtil.java |  32 +-
 .../typesystem/json/SerializationJavaTest.java  |  50 +-
 .../apache/atlas/typesystem/types/BaseTest.java | 120 ++--
 .../apache/atlas/typesystem/types/EnumTest.java |  82 +--
 .../atlas/typesystem/types/StructTest.java      |   3 +-
 .../atlas/typesystem/types/TraitTest.java       |  29 +-
 .../typesystem/types/TypeInheritanceTest.java   |  47 +-
 .../atlas/typesystem/types/TypeSystemTest.java  |  46 +-
 .../atlas/typesystem/types/ValidationTest.java  |  21 +-
 webapp/pom.xml                                  |   2 +-
 webapp/src/main/java/org/apache/atlas/Main.java |  14 +-
 .../org/apache/atlas/examples/QuickStart.java   | 346 ++++------
 .../atlas/util/CredentialProviderUtility.java   |  33 +-
 .../web/errors/LoggingExceptionMapper.java      |   7 +-
 .../apache/atlas/web/filters/AuditFilter.java   |  17 +-
 .../atlas/web/listeners/GuiceServletConfig.java |   8 +-
 .../atlas/web/listeners/LoginProcessor.java     |  13 +-
 .../apache/atlas/web/params/AbstractParam.java  |   8 +-
 .../atlas/web/resources/AdminResource.java      |   9 +-
 .../atlas/web/resources/EntityResource.java     |  88 +--
 .../web/resources/HiveLineageResource.java      |  36 +-
 .../resources/MetadataDiscoveryResource.java    |  56 +-
 .../web/resources/RexsterGraphResource.java     |  64 +-
 .../atlas/web/resources/TypesResource.java      |  25 +-
 .../atlas/web/service/EmbeddedServer.java       |   3 +-
 .../atlas/web/service/SecureEmbeddedServer.java |   9 +-
 .../apache/atlas/web/util/DateTimeHelper.java   |  15 +-
 .../org/apache/atlas/web/util/Servlets.java     |   7 +-
 .../atlas/CredentialProviderUtilityIT.java      |  47 +-
 .../MetadataAuthenticationKerberosFilterIT.java |   2 +-
 .../MetadataAuthenticationSimpleFilterIT.java   |   8 +-
 .../atlas/web/listeners/LoginProcessorIT.java   |   3 +-
 .../web/resources/AdminJerseyResourceIT.java    |  19 +-
 .../atlas/web/resources/BaseResourceIT.java     |   9 +-
 .../web/resources/EntityJerseyResourceIT.java   | 219 +++---
 .../resources/HiveLineageJerseyResourceIT.java  | 211 ++----
 .../MetadataDiscoveryJerseyResourceIT.java      |  60 +-
 .../resources/RexsterGraphJerseyResourceIT.java |  15 +-
 .../web/resources/TypesJerseyResourceIT.java    |  89 +--
 .../web/service/SecureEmbeddedServerIT.java     |   4 +-
 .../web/service/SecureEmbeddedServerITBase.java |  32 +-
 131 files changed, 2713 insertions(+), 3832 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconImporter.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconImporter.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconImporter.java
index 4ad1e3d..82710ba 100755
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconImporter.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconImporter.java
@@ -61,14 +61,16 @@ public class FalconImporter {
         this.repository = repo;
     }
 
-    private Entity getEntity(FalconClient client, EntityType type, String name) throws FalconCLIException, JAXBException {
+    private Entity getEntity(FalconClient client, EntityType type, String name)
+    throws FalconCLIException, JAXBException {
         String entityStr = client.getDefinition(type.name(), name);
         return (Entity) type.getUnmarshaller().unmarshal(new StringReader(entityStr));
     }
 
     public void importClusters() throws MetadataException {
         try {
-            EntityList clusters = client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
+            EntityList clusters =
+                    client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null);
             for (EntityList.EntityElement element : clusters.getElements()) {
                 Cluster cluster = (Cluster) getEntity(client, EntityType.CLUSTER, element.name);
 
@@ -80,7 +82,8 @@ public class FalconImporter {
                     acl.set("owner", cluster.getACL().getOwner());
                     acl.set("group", cluster.getACL().getGroup());
                     acl.set("permission", cluster.getACL().getPermission());
-                    StructType aclType = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
+                    StructType aclType =
+                            typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.ACL.name());
                     clusterRef.set("acl", aclType.convert(acl, Multiplicity.REQUIRED));
                 }
 
@@ -88,7 +91,8 @@ public class FalconImporter {
                     String[] parts = cluster.getTags().split(",");
                     List<ITypedInstance> tags = new ArrayList<>();
                     for (String part : parts) {
-                        TraitType tagType = typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
+                        TraitType tagType =
+                                typeSystem.getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name());
                         String[] kv = part.trim().split("=");
                         Struct tag = new Struct(FalconTypeSystem.DefinedTypes.TAG.name());
                         tag.set("name", kv[0]);
@@ -106,10 +110,12 @@ public class FalconImporter {
                     List<ITypedInstance> locations = new ArrayList<>();
                     for (Location loc : cluster.getLocations().getLocations()) {
                         Struct location = new Struct(FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
-                        EnumType locationType = typeSystem.getDataType(EnumType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
+                        EnumType locationType = typeSystem.getDataType(EnumType.class,
+                                FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION_TYPE.name());
                         location.set("type", locationType.fromValue(loc.getName().toUpperCase()));
                         location.set("path", loc.getPath());
-                        StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
+                        StructType type = typeSystem
+                                .getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_LOCATION.name());
                         locations.add(type.convert(location, Multiplicity.REQUIRED));
                     }
                     clusterRef.set("locations", locations);
@@ -122,7 +128,8 @@ public class FalconImporter {
                         interfaceStruct.set("type", interfaceFld.getType().name());
                         interfaceStruct.set("endpoint", interfaceFld.getEndpoint());
                         interfaceStruct.set("version", interfaceFld.getVersion());
-                        StructType type = typeSystem.getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
+                        StructType type = typeSystem
+                                .getDataType(StructType.class, FalconTypeSystem.DefinedTypes.CLUSTER_INTERFACE.name());
                         interfaces.add(type.convert(interfaceStruct, Multiplicity.REQUIRED));
                     }
                     clusterRef.set("interfaces", interfaces);

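The getEntity method reformatted above is a compact instance of JAXB unmarshalling: fetch an XML definition as a string, then rebuild the object with the entity type's Unmarshaller. A self-contained sketch of the same pattern, using a hypothetical ClusterDef class rather than the Falcon Entity hierarchy (javax.xml.bind is assumed on the classpath, as it is on the Java 8-era JDKs this codebase targets):

    import java.io.StringReader;
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.JAXBException;
    import javax.xml.bind.Unmarshaller;
    import javax.xml.bind.annotation.XmlAttribute;
    import javax.xml.bind.annotation.XmlRootElement;

    public class UnmarshalSketch {
        @XmlRootElement(name = "cluster")
        public static class ClusterDef {   // hypothetical stand-in for a Falcon entity
            @XmlAttribute
            public String name;
        }

        public static void main(String[] args) throws JAXBException {
            String entityStr = "<cluster name='primary'/>";  // what a definition fetch would return
            Unmarshaller u = JAXBContext.newInstance(ClusterDef.class).createUnmarshaller();
            // Same shape as FalconImporter.getEntity: unmarshal straight from an in-memory string
            ClusterDef def = (ClusterDef) u.unmarshal(new StringReader(entityStr));
            System.out.println(def.name);                    // prints "primary"
        }
    }
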
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconTypeSystem.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconTypeSystem.java b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconTypeSystem.java
index 6cbd970..4e06777 100755
--- a/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconTypeSystem.java
+++ b/addons/falcon-bridge/src/main/java/org/apache/atlas/falcon/FalconTypeSystem.java
@@ -54,7 +54,7 @@ public class FalconTypeSystem {
 
     public static FalconTypeSystem getInstance() throws MetadataException {
         if (INSTANCE == null) {
-            synchronized(LOG) {
+            synchronized (LOG) {
                 if (INSTANCE == null) {
                     INSTANCE = new FalconTypeSystem();
                 }
@@ -73,12 +73,16 @@ public class FalconTypeSystem {
                 new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
                 new AttributeDefinition("acl", DefinedTypes.ACL.name(), Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition("tags", DefinedTypes.TAG.name(), Multiplicity.COLLECTION, false, null),
-                new AttributeDefinition("locations", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.COLLECTION, false, null),
-                new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION, false, null),
-                new AttributeDefinition("properties", TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(), Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition("locations",
+                        TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
+                        Multiplicity.COLLECTION, false, null),
+                new AttributeDefinition("interfaces", DefinedTypes.CLUSTER_INTERFACE.name(), Multiplicity.COLLECTION,
+                        false, null), new AttributeDefinition("properties",
+                TYPE_SYSTEM.defineMapType(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE).getName(),
+                Multiplicity.OPTIONAL, false, null),};
         HierarchicalTypeDefinition<ClassType> cluster =
-                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(), ImmutableList.<String>of(), attributeDefinitions);
+                new HierarchicalTypeDefinition<>(ClassType.class, DefinedTypes.CLUSTER.name(),
+                        ImmutableList.<String>of(), attributeDefinitions);
         LOG.debug("Created definition for " + DefinedTypes.CLUSTER.name());
         return cluster;
     }
@@ -86,57 +90,52 @@ public class FalconTypeSystem {
     private HierarchicalTypeDefinition<TraitType> defineTags() {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("name", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)
-        };
+                new AttributeDefinition("value", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null)};
 
-        HierarchicalTypeDefinition<TraitType> traitType = new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(), attributeDefinitions);
+        HierarchicalTypeDefinition<TraitType> traitType =
+                new HierarchicalTypeDefinition<>(TraitType.class, DefinedTypes.TAG.name(), ImmutableList.<String>of(),
+                        attributeDefinitions);
         LOG.debug("Created definition for " + DefinedTypes.TAG.name());
         traitTypeDefinitions.add(traitType);
         return traitType;
     }
 
     private StructTypeDefinition defineClusterLocation() throws MetadataException {
-        EnumValue values[] = {
-                new EnumValue("WORKING", 1),
-                new EnumValue("STAGING", 2),
-                new EnumValue("TEMP", 3),
-        };
+        EnumValue values[] = {new EnumValue("WORKING", 1), new EnumValue("STAGING", 2), new EnumValue("TEMP", 3),};
 
         LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION_TYPE.name());
         EnumTypeDefinition locationType = new EnumTypeDefinition(DefinedTypes.CLUSTER_LOCATION_TYPE.name(), values);
         TYPE_SYSTEM.defineEnumType(locationType);
 
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
-        };
+                new AttributeDefinition("type", DefinedTypes.CLUSTER_LOCATION_TYPE.name(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("path", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
         LOG.debug("Created definition for " + DefinedTypes.CLUSTER_LOCATION.name());
-        StructTypeDefinition location = new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
+        StructTypeDefinition location =
+                new StructTypeDefinition(DefinedTypes.CLUSTER_LOCATION.name(), attributeDefinitions);
         structTypeDefinitions.add(location);
         return location;
     }
 
     private StructTypeDefinition defineClusterInterface() throws MetadataException {
-        EnumValue values[] = {
-                new EnumValue("READONLY", 1),
-                new EnumValue("WRITE", 2),
-                new EnumValue("EXECUTE", 3),
-                new EnumValue("WORKFLOW", 4),
-                new EnumValue("MESSAGING", 5),
-                new EnumValue("REGISTRY", 6),
-        };
+        EnumValue values[] = {new EnumValue("READONLY", 1), new EnumValue("WRITE", 2), new EnumValue("EXECUTE", 3),
+                new EnumValue("WORKFLOW", 4), new EnumValue("MESSAGING", 5), new EnumValue("REGISTRY", 6),};
 
         LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE_TYPE.name());
         EnumTypeDefinition interfaceType = new EnumTypeDefinition(DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), values);
         TYPE_SYSTEM.defineEnumType(interfaceType);
 
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
-        };
+                new AttributeDefinition("type", DefinedTypes.CLUSTER_INTERFACE_TYPE.name(), Multiplicity.REQUIRED,
+                        false, null),
+                new AttributeDefinition("endpoint", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("version", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),};
         LOG.debug("Created definition for " + DefinedTypes.CLUSTER_INTERFACE.name());
-        StructTypeDefinition interfaceEntity = new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
+        StructTypeDefinition interfaceEntity =
+                new StructTypeDefinition(DefinedTypes.CLUSTER_INTERFACE.name(), attributeDefinitions);
         structTypeDefinitions.add(interfaceEntity);
         return interfaceEntity;
     }
@@ -154,13 +153,10 @@ public class FalconTypeSystem {
 
     private StructTypeDefinition defineACL() {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("group", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("permission", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),};
         LOG.debug("Created definition for " + DefinedTypes.ACL.name());
         StructTypeDefinition acl = new StructTypeDefinition(DefinedTypes.ACL.name(), attributeDefinitions);
         structTypeDefinitions.add(acl);

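getInstance above is double-checked locking, with the LOG object doubling as the lock. For reference, the textbook-safe form of the idiom under the Java memory model needs a volatile instance field and conventionally locks on the class itself; a minimal sketch, not the Atlas code:

    public final class LazySingleton {
        // volatile is what makes the second unlocked read safe under the JMM
        private static volatile LazySingleton instance;

        private LazySingleton() {
        }

        public static LazySingleton getInstance() {
            if (instance == null) {                     // fast path: no lock once initialized
                synchronized (LazySingleton.class) {    // lock on the class, not an unrelated object
                    if (instance == null) {             // re-check while holding the lock
                        instance = new LazySingleton();
                    }
                }
            }
            return instance;
        }
    }
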
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconImporterTest.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconImporterTest.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconImporterTest.java
index 7562b89..924b2a4 100755
--- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconImporterTest.java
+++ b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconImporterTest.java
@@ -48,11 +48,12 @@ public class FalconImporterTest {
         FalconTypeSystem.getInstance();
 
         FalconImporter importer = new FalconImporter(client, repo);
-        when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null,
-                null)).thenReturn(getEntityList());
+        when(client.getEntityList(EntityType.CLUSTER.name(), null, null, null, null, null, null, null))
+                .thenReturn(getEntityList());
         //TODO Set other fields in cluster
         when(client.getDefinition(anyString(), anyString())).thenReturn(getCluster());
-        when(repo.createEntity(any(IReferenceableInstance.class), anyString())).thenReturn(UUID.randomUUID().toString());
+        when(repo.createEntity(any(IReferenceableInstance.class), anyString()))
+                .thenReturn(UUID.randomUUID().toString());
 
         importer.importClusters();
     }

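The test above leans on Mockito stubbing: any-argument matchers on the mocked FalconClient and MetadataRepository return canned values, so importClusters() can run without a live Falcon. A self-contained sketch of that stubbing style against a hypothetical DefinitionSource interface (invented for illustration):

    import static org.mockito.Mockito.anyString;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    public class StubbingSketch {
        interface DefinitionSource {   // hypothetical collaborator, standing in for FalconClient
            String getDefinition(String type, String name) throws Exception;
        }

        public static void main(String[] args) throws Exception {
            DefinitionSource source = mock(DefinitionSource.class);
            // Any-argument stubbing, as in FalconImporterTest
            when(source.getDefinition(anyString(), anyString())).thenReturn("<cluster/>");
            System.out.println(source.getDefinition("CLUSTER", "primary"));  // prints "<cluster/>"
        }
    }
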
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconTypeSystemTest.java
----------------------------------------------------------------------
diff --git a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconTypeSystemTest.java b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconTypeSystemTest.java
index cc2b65f..7667903 100755
--- a/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconTypeSystemTest.java
+++ b/addons/falcon-bridge/src/test/java/org/apache/atlas/falcon/FalconTypeSystemTest.java
@@ -29,7 +29,9 @@ public class FalconTypeSystemTest {
     @Test
     public void testTypeSystem() throws MetadataException {
         FalconTypeSystem.getInstance();
-        Assert.assertNotNull(TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
-        Assert.assertNotNull(TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
+        Assert.assertNotNull(
+                TypeSystem.getInstance().getDataType(ClassType.class, FalconTypeSystem.DefinedTypes.CLUSTER.name()));
+        Assert.assertNotNull(
+                TypeSystem.getInstance().getDataType(TraitType.class, FalconTypeSystem.DefinedTypes.TAG.name()));
     }
 }

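FalconTypeSystemTest pins down the contract the files in this commit keep re-stating: definitions are registered under their DefinedTypes names and later resolved with getDataType(Class, name). A hypothetical miniature of that lookup contract (MiniTypeRegistry is invented for illustration; the real Atlas TypeSystem does far more):

    import java.util.HashMap;
    import java.util.Map;

    public class MiniTypeRegistry {
        // name -> definition, like the struct/trait/class definition maps in this commit
        private final Map<String, Object> types = new HashMap<>();

        public void define(String name, Object definition) {
            types.put(name, definition);
        }

        public <T> T getDataType(Class<T> cls, String name) {
            Object def = types.get(name);
            if (def == null || !cls.isInstance(def)) {
                throw new IllegalArgumentException("Unknown type: " + name);
            }
            return cls.cast(def);
        }

        public static void main(String[] args) {
            MiniTypeRegistry ts = new MiniTypeRegistry();
            ts.define("TAG", "trait-definition-for-TAG");       // placeholder payload
            System.out.println(ts.getDataType(String.class, "TAG"));
        }
    }
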
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
index d7ffa2b..58ad93d 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/bridge/HiveMetaStoreBridge.java
@@ -159,9 +159,8 @@ public class HiveMetaStoreBridge {
         LOG.debug("Getting reference for database {}", databaseName);
         String typeName = HiveDataTypes.HIVE_DB.getName();
 
-        String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName,
-                HiveDataModelGenerator.NAME, databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME,
-                clusterName);
+        String dslQuery = String.format("%s where %s = '%s' and %s = '%s'", typeName, HiveDataModelGenerator.NAME,
+                databaseName.toLowerCase(), HiveDataModelGenerator.CLUSTER_NAME, clusterName);
         return getEntityReferenceFromDSL(typeName, dslQuery);
     }
 
@@ -170,11 +169,12 @@ public class HiveMetaStoreBridge {
         String typeName = HiveDataTypes.HIVE_PROCESS.getName();
 
         //todo enable DSL
-//        String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
-//        return getEntityReferenceFromDSL(typeName, dslQuery);
+        //        String dslQuery = String.format("%s where queryText = \"%s\"", typeName, queryStr);
+        //        return getEntityReferenceFromDSL(typeName, dslQuery);
 
-        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
-                typeName, typeName, StringEscapeUtils.escapeJava(queryStr));
+        String gremlinQuery =
+                String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
+                        StringEscapeUtils.escapeJava(queryStr));
         return getEntityReferenceFromGremlin(typeName, gremlinQuery);
     }
 
@@ -216,9 +216,8 @@ public class HiveMetaStoreBridge {
         return getEntityReferenceFromDSL(typeName, dslQuery);
     }
 
-    private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery) throws
-    AtlasServiceException,
-    JSONException {
+    private Referenceable getEntityReferenceFromGremlin(String typeName, String gremlinQuery)
+    throws AtlasServiceException, JSONException {
         AtlasClient client = getAtlasClient();
         JSONObject response = client.searchByGremlin(gremlinQuery);
         JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
@@ -236,7 +235,8 @@ public class HiveMetaStoreBridge {
 
         //todo replace gremlin with DSL
         //        String dslQuery = String.format("%s as p where values = %s, tableName where name = '%s', "
-        //                        + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr, tableName,
+        //                        + "dbName where name = '%s' and clusterName = '%s' select p", typeName, valuesStr,
+        // tableName,
         //                dbName, clusterName);
 
         String datasetType = AtlasClient.DATA_SET_SUPER_TYPE;
@@ -373,9 +373,8 @@ public class HiveMetaStoreBridge {
         return partRef;
     }
 
-    private void importIndexes(String db, String table,
-                               Referenceable dbReferenceable,
-                               Referenceable tableReferenceable) throws Exception {
+    private void importIndexes(String db, String table, Referenceable dbReferenceable, Referenceable tableReferenceable)
+    throws Exception {
         List<Index> indexes = hiveClient.getIndexes(db, table, Short.MAX_VALUE);
         if (indexes.size() > 0) {
             for (Index index : indexes) {
@@ -385,9 +384,8 @@ public class HiveMetaStoreBridge {
     }
 
     //todo should be idempotent
-    private void importIndex(Index index,
-                             Referenceable dbReferenceable,
-                             Referenceable tableReferenceable) throws Exception {
+    private void importIndex(Index index, Referenceable dbReferenceable, Referenceable tableReferenceable)
+            throws Exception {
         LOG.info("Importing index {} for {}.{}", index.getIndexName(), dbReferenceable, tableReferenceable);
         Referenceable indexRef = new Referenceable(HiveDataTypes.HIVE_INDEX.getName());
 
@@ -411,7 +409,8 @@ public class HiveMetaStoreBridge {
         createInstance(indexRef);
     }
 
-    private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList) throws Exception {
+    private Referenceable fillStorageDescStruct(StorageDescriptor storageDesc, List<Referenceable> colList)
+    throws Exception {
         LOG.debug("Filling storage descriptor information for " + storageDesc);
 
         Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
@@ -429,7 +428,8 @@ public class HiveMetaStoreBridge {
 
         sdReferenceable.set("serdeInfo", serdeInfoStruct);
         sdReferenceable.set(HiveDataModelGenerator.STORAGE_NUM_BUCKETS, storageDesc.getNumBuckets());
-        sdReferenceable.set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
+        sdReferenceable
+                .set(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS, storageDesc.isStoredAsSubDirectories());
 
         //Use the passed column list if not null, ex: use same references for table and SD
         List<FieldSchema> columns = storageDesc.getCols();
@@ -469,8 +469,7 @@ public class HiveMetaStoreBridge {
         return createInstance(sdReferenceable);
     }
 
-    private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception
-    {
+    private List<Referenceable> getColumns(List<FieldSchema> schemaList) throws Exception {
         List<Referenceable> colList = new ArrayList<>();
         for (FieldSchema fs : schemaList) {
             LOG.debug("Processing field " + fs);
@@ -489,7 +488,7 @@ public class HiveMetaStoreBridge {
         AtlasClient dgiClient = getAtlasClient();
 
         //Register hive data model if its not already registered
-        if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null ) {
+        if (dgiClient.getType(HiveDataTypes.HIVE_PROCESS.getName()) == null) {
             LOG.info("Registering Hive data model");
             dgiClient.createType(dataModelGenerator.getModelAsJson());
         } else {

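The Gremlin lookup above drops raw query text into a double-quoted string literal, so it runs the text through StringEscapeUtils.escapeJava first. A small sketch of why that matters (commons-lang is assumed on the classpath, matching the import style of this codebase; the Gremlin text is illustrative):

    import org.apache.commons.lang.StringEscapeUtils;

    public class EscapeSketch {
        public static void main(String[] args) {
            // Hive query text routinely contains quotes and newlines
            String queryStr = "select \"a\" from t\nwhere x = 1";
            // escapeJava rewrites ", \ and control characters as Java-style escapes,
            // so the value survives inside the double-quoted Gremlin literal
            String gremlinQuery = String.format("g.V.has('queryText', \"%s\").toList()",
                    StringEscapeUtils.escapeJava(queryStr));
            System.out.println(gremlinQuery);
        }
    }
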
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
index 26772e3..b48fc0a 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/hook/HiveHook.java
@@ -104,18 +104,18 @@ public class HiveHook implements ExecuteWithHookContext {
 
         try {
             Runtime.getRuntime().addShutdownHook(new Thread() {
-                        @Override
-                        public void run() {
-                            try {
-                                executor.shutdown();
-                                executor.awaitTermination(WAIT_TIME, TimeUnit.SECONDS);
-                                executor = null;
-                            } catch (InterruptedException ie) {
-                                LOG.info("Interrupt received in shutdown.");
-                            }
-                            // shutdown client
-                        }
-                    });
+                @Override
+                public void run() {
+                    try {
+                        executor.shutdown();
+                        executor.awaitTermination(WAIT_TIME, TimeUnit.SECONDS);
+                        executor = null;
+                    } catch (InterruptedException ie) {
+                        LOG.info("Interrupt received in shutdown.");
+                    }
+                    // shutdown client
+                }
+            });
         } catch (IllegalStateException is) {
             LOG.info("Attempting to send msg while shutdown in progress.");
         }
@@ -163,15 +163,15 @@ public class HiveHook implements ExecuteWithHookContext {
             fireAndForget(event);
         } else {
             executor.submit(new Runnable() {
-                        @Override
-                        public void run() {
-                            try {
-                                fireAndForget(event);
-                            } catch (Throwable e) {
-                                LOG.info("DGI hook failed", e);
-                            }
-                        }
-                    });
+                @Override
+                public void run() {
+                    try {
+                        fireAndForget(event);
+                    } catch (Throwable e) {
+                        LOG.info("DGI hook failed", e);
+                    }
+                }
+            });
         }
     }
 
@@ -348,7 +348,7 @@ public class HiveHook implements ExecuteWithHookContext {
             explain.initialize(event.conf, event.queryPlan, null);
             List<Task<?>> rootTasks = event.queryPlan.getRootTasks();
             return explain.getJSONPlan(null, null, rootTasks, event.queryPlan.getFetchTask(), true, false, false);
-        } catch(Exception e) {
+        } catch (Exception e) {
             LOG.warn("Failed to get queryplan", e);
             return new JSONObject();
         }

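Beyond the indentation changes, the two re-indented blocks above show HiveHook's async shape: a JVM shutdown hook that drains the executor with a bounded wait, and fire-and-forget submission that logs failures rather than propagating them. The same skeleton, reduced to a runnable sketch (the 3-second wait stands in for HiveHook's WAIT_TIME):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.TimeUnit;

    public class AsyncHookSketch {
        private static ExecutorService executor = Executors.newSingleThreadExecutor();

        public static void main(String[] args) {
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override
                public void run() {
                    try {
                        executor.shutdown();                            // stop accepting new work
                        executor.awaitTermination(3, TimeUnit.SECONDS); // bounded wait, as in HiveHook
                    } catch (InterruptedException ie) {
                        Thread.currentThread().interrupt();
                    }
                }
            });

            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        System.out.println("async hook work");  // stands in for fireAndForget(event)
                    } catch (Throwable e) {
                        // swallow: the hook must never break the Hive query it observes
                    }
                }
            });
        }
    }
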
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
index e607b16..cb8359e 100755
--- a/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
+++ b/addons/hive-bridge/src/main/java/org/apache/atlas/hive/model/HiveDataModelGenerator.java
@@ -19,8 +19,8 @@
 package org.apache.atlas.hive.model;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.atlas.AtlasException;
 import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasException;
 import org.apache.atlas.typesystem.TypesDef;
 import org.apache.atlas.typesystem.json.TypesSerialization;
 import org.apache.atlas.typesystem.types.AttributeDefinition;
@@ -102,12 +102,8 @@ public class HiveDataModelGenerator {
     }
 
     public TypesDef getTypesDef() {
-        return TypeUtils.getTypesDef(
-                getEnumTypeDefinitions(),
-                getStructTypeDefinitions(),
-                getTraitTypeDefinitions(),
-                getClassTypeDefinitions()
-        );
+        return TypeUtils.getTypesDef(getEnumTypeDefinitions(), getStructTypeDefinitions(), getTraitTypeDefinitions(),
+                getClassTypeDefinitions());
     }
 
     public String getDataModelAsJSON() {
@@ -131,108 +127,82 @@ public class HiveDataModelGenerator {
     }
 
     private void createHiveObjectTypeEnum() throws AtlasException {
-        EnumValue values[] = {
-                new EnumValue("GLOBAL", 1),
-                new EnumValue("DATABASE", 2),
-                new EnumValue("TABLE", 3),
-                new EnumValue("PARTITION", 4),
-                new EnumValue("COLUMN", 5),
-        };
-
-        EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
+        EnumValue values[] = {new EnumValue("GLOBAL", 1), new EnumValue("DATABASE", 2), new EnumValue("TABLE", 3),
+                new EnumValue("PARTITION", 4), new EnumValue("COLUMN", 5),};
+
+        EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), values);
         enumTypeDefinitionMap.put(HiveDataTypes.HIVE_OBJECT_TYPE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_OBJECT_TYPE.getName());
     }
 
     private void createHivePrincipalTypeEnum() throws AtlasException {
-        EnumValue values[] = {
-                new EnumValue("USER", 1),
-                new EnumValue("ROLE", 2),
-                new EnumValue("GROUP", 3),
-        };
+        EnumValue values[] = {new EnumValue("USER", 1), new EnumValue("ROLE", 2), new EnumValue("GROUP", 3),};
 
-        EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
+        EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), values);
 
         enumTypeDefinitionMap.put(HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName());
     }
 
     private void createResourceTypeEnum() throws AtlasException {
-        EnumValue values[] = {
-                new EnumValue("JAR", 1),
-                new EnumValue("FILE", 2),
-                new EnumValue("ARCHIVE", 3),
-        };
-        EnumTypeDefinition definition = new EnumTypeDefinition(
-                HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
+        EnumValue values[] = {new EnumValue("JAR", 1), new EnumValue("FILE", 2), new EnumValue("ARCHIVE", 3),};
+        EnumTypeDefinition definition = new EnumTypeDefinition(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), values);
         enumTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCE_TYPE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCE_TYPE.getName());
     }
 
     private void createSerDeStruct() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
-        StructTypeDefinition definition = new StructTypeDefinition(
-                HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
+                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("serializationLib", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
+                        false, null),
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
+        StructTypeDefinition definition =
+                new StructTypeDefinition(HiveDataTypes.HIVE_SERDE.getName(), attributeDefinitions);
         structTypeDefinitionMap.put(HiveDataTypes.HIVE_SERDE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_SERDE.getName());
     }
 
     private void createOrderStruct() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("order", DataTypes.INT_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-        };
+                new AttributeDefinition("col", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("order", DataTypes.INT_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
 
-        StructTypeDefinition definition = new StructTypeDefinition(
-                HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
+        StructTypeDefinition definition =
+                new StructTypeDefinition(HiveDataTypes.HIVE_ORDER.getName(), attributeDefinitions);
         structTypeDefinitionMap.put(HiveDataTypes.HIVE_ORDER.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_ORDER.getName());
     }
 
     private void createStorageDescClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("cols",
-                        String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
+                new AttributeDefinition("cols", String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.COLLECTION, false, null),
-                new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition(STORAGE_NUM_BUCKETS, DataTypes.INT_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("bucketCols",
-                        String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sortCols",
-                        String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("location", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("inputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("outputFormat", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("compressed", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition(STORAGE_NUM_BUCKETS, DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("serdeInfo", HiveDataTypes.HIVE_SERDE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("bucketCols", String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
+                        Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("sortCols", String.format("array<%s>", HiveDataTypes.HIVE_ORDER.getName()),
+                        Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
                 //new AttributeDefinition("skewedInfo", DefinedTypes.HIVE_SKEWEDINFO.getName(),
                 // Multiplicity.OPTIONAL, false, null),
                 new AttributeDefinition(STORAGE_IS_STORED_AS_SUB_DIRS, DataTypes.BOOLEAN_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                        Multiplicity.OPTIONAL, false, null),};
 
-        HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(), null, attributeDefinitions);
+        HierarchicalTypeDefinition<ClassType> definition =
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_STORAGEDESC.getName(), null,
+                        attributeDefinitions);
         classTypeDefinitions.put(HiveDataTypes.HIVE_STORAGEDESC.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_STORAGEDESC.getName());
     }
@@ -243,54 +213,45 @@ public class HiveDataModelGenerator {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("resourceType", HiveDataTypes.HIVE_RESOURCE_TYPE.getName(),
                         Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-        };
-        StructTypeDefinition definition = new StructTypeDefinition(
-                HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
+                new AttributeDefinition("uri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),};
+        StructTypeDefinition definition =
+                new StructTypeDefinition(HiveDataTypes.HIVE_RESOURCEURI.getName(), attributeDefinitions);
         structTypeDefinitionMap.put(HiveDataTypes.HIVE_RESOURCEURI.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_RESOURCEURI.getName());
     }
 
     private void createDBClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition(CLUSTER_NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition(CLUSTER_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("description", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("locationUri", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("ownerType", HiveDataTypes.HIVE_PRINCIPAL_TYPE.getName(), Multiplicity.OPTIONAL,
+                        false, null),};
 
         HierarchicalTypeDefinition<ClassType> definition =
-                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.getName(),
-                        null, attributeDefinitions);
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_DB.getName(), null,
+                        attributeDefinitions);
         classTypeDefinitions.put(HiveDataTypes.HIVE_DB.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_DB.getName());
     }
 
     private void createTypeClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("type1", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("fields", String.format("array<%s>",
-                        HiveDataTypes.HIVE_COLUMN.getName()), Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("type1", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("type2", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("fields", String.format("array<%s>", HiveDataTypes.HIVE_COLUMN.getName()),
+                        Multiplicity.OPTIONAL, false, null),};
         HierarchicalTypeDefinition<ClassType> definition =
-                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.getName(),
-                        null, attributeDefinitions);
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TYPE.getName(), null,
+                        attributeDefinitions);
 
         classTypeDefinitions.put(HiveDataTypes.HIVE_TYPE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_TYPE.getName());
@@ -298,17 +259,12 @@ public class HiveDataModelGenerator {
 
     private void createColumnClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("type", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
         HierarchicalTypeDefinition<ClassType> definition =
-                new HierarchicalTypeDefinition<>(
-                        ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(),
-                        null, attributeDefinitions);
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_COLUMN.getName(), null,
+                        attributeDefinitions);
         classTypeDefinitions.put(HiveDataTypes.HIVE_COLUMN.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_COLUMN.getName());
     }
@@ -317,62 +273,50 @@ public class HiveDataModelGenerator {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
                 new AttributeDefinition("values", DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName()),
                         Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("columns",
-                        DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+                new AttributeDefinition(TABLE, HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, true, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),};
         HierarchicalTypeDefinition<ClassType> definition =
-                new HierarchicalTypeDefinition<>(ClassType.class,
-                        HiveDataTypes.HIVE_PARTITION.getName(), null, attributeDefinitions);
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_PARTITION.getName(), null,
+                        attributeDefinitions);
         classTypeDefinitions.put(HiveDataTypes.HIVE_PARTITION.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_PARTITION.getName());
     }
 
     private void createTableClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("partitionKeys",
-                        DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("columns",
-                        DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+                new AttributeDefinition(TABLE_NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("owner", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition(COMMENT, DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("retention", DataTypes.INT_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("partitionKeys", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
+                        Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("columns", DataTypes.arrayTypeName(HiveDataTypes.HIVE_COLUMN.getName()),
                         Multiplicity.OPTIONAL, true, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("tableType", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("temporary", DataTypes.BOOLEAN_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("viewOriginalText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
+                        false, null),
+                new AttributeDefinition("viewExpandedText", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL,
+                        false, null),
+                new AttributeDefinition("tableType", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("temporary", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),};
         HierarchicalTypeDefinition<ClassType> definition =
                 new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_TABLE.getName(),
                         ImmutableList.of("DataSet"), attributeDefinitions);
@@ -382,27 +326,23 @@ public class HiveDataModelGenerator {
 
     private void createIndexClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("origTable", HiveDataTypes.HIVE_TABLE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("indexTable", HiveDataTypes.HIVE_TABLE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-                new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition(NAME, DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("indexHandlerClass", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED,
+                        false, null),
+                new AttributeDefinition(DB, HiveDataTypes.HIVE_DB.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("lastAccessTime", DataTypes.LONG_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("origTable", HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("indexTable", HiveDataTypes.HIVE_TABLE.getName(), Multiplicity.OPTIONAL, false,
+                        null),
+                new AttributeDefinition("sd", HiveDataTypes.HIVE_STORAGEDESC.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("parameters", STRING_MAP_TYPE.getName(), Multiplicity.OPTIONAL, false, null),
+                new AttributeDefinition("deferredRebuild", DataTypes.BOOLEAN_TYPE.getName(), Multiplicity.OPTIONAL,
+                        false, null),};
 
         HierarchicalTypeDefinition<ClassType> definition =
                 new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_INDEX.getName(),
@@ -413,15 +353,15 @@ public class HiveDataModelGenerator {
 
     private void createRoleClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-        };
-        HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null, attributeDefinitions);
+                new AttributeDefinition("roleName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("createTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("ownerName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),};
+        HierarchicalTypeDefinition<ClassType> definition =
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_ROLE.getName(), null,
+                        attributeDefinitions);
 
         classTypeDefinitions.put(HiveDataTypes.HIVE_ROLE.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_ROLE.getName());
@@ -429,25 +369,21 @@ public class HiveDataModelGenerator {
 
     private void createProcessClass() throws AtlasException {
         AttributeDefinition[] attributeDefinitions = new AttributeDefinition[]{
-                new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("endTime", DataTypes.LONG_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("queryPlan", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("queryId", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.REQUIRED, false, null),
-                new AttributeDefinition("queryGraph", DataTypes.STRING_TYPE.getName(),
-                        Multiplicity.OPTIONAL, false, null),
-        };
+                new AttributeDefinition("startTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("endTime", DataTypes.LONG_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("userName", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("queryText", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("queryPlan", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false,
+                        null),
+                new AttributeDefinition("queryId", DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("queryGraph", DataTypes.STRING_TYPE.getName(), Multiplicity.OPTIONAL, false,
+                        null),};
 
-        HierarchicalTypeDefinition<ClassType> definition = new HierarchicalTypeDefinition<>(
-                ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
-                ImmutableList.of(AtlasClient.PROCESS_SUPER_TYPE), attributeDefinitions);
+        HierarchicalTypeDefinition<ClassType> definition =
+                new HierarchicalTypeDefinition<>(ClassType.class, HiveDataTypes.HIVE_PROCESS.getName(),
+                        ImmutableList.of(AtlasClient.PROCESS_SUPER_TYPE), attributeDefinitions);
         classTypeDefinitions.put(HiveDataTypes.HIVE_PROCESS.getName(), definition);
         LOG.debug("Created definition for " + HiveDataTypes.HIVE_PROCESS.getName());
     }

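Each create*Class() method in the hunks above follows one construction pattern: build an AttributeDefinition array, wrap it in a HierarchicalTypeDefinition, register it in classTypeDefinitions, and log. A minimal sketch of that shared shape, assuming the surrounding HiveDataModelGenerator fields (classTypeDefinitions, LOG) shown in the diff; the defineClassType helper name is illustrative, not part of the class:

    // Hypothetical helper capturing the shape shared by createColumnClass(),
    // createPartitionClass(), createTableClass(), createIndexClass(),
    // createRoleClass() and createProcessClass().
    private void defineClassType(String typeName, ImmutableList<String> superTypes,
                                 AttributeDefinition[] attributeDefinitions) throws AtlasException {
        HierarchicalTypeDefinition<ClassType> definition =
                new HierarchicalTypeDefinition<>(ClassType.class, typeName, superTypes, attributeDefinitions);
        classTypeDefinitions.put(typeName, definition);
        LOG.debug("Created definition for " + typeName);
    }

With such a helper, createProcessClass() would reduce to a single call: defineClassType(HiveDataTypes.HIVE_PROCESS.getName(), ImmutableList.of(AtlasClient.PROCESS_SUPER_TYPE), attributeDefinitions).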
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
index 084e831..6cc3f33 100644
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/BaseSSLAndKerberosTest.java
@@ -70,27 +70,22 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
         file.delete();
         conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
 
-        CredentialProvider provider =
-                CredentialProviderFactory.getProviders(conf).get(0);
+        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
 
         // create new aliases
         try {
 
             char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    KEYSTORE_PASSWORD_KEY, storepass);
+            provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
 
             char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    TRUSTSTORE_PASSWORD_KEY, trustpass);
+            provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
 
             char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    "ssl.client.truststore.password", trustpass2);
+            provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
 
             char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    SERVER_CERT_PASSWORD_KEY, certpass);
+            provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
 
             // write out so that it can be found in checks
             provider.flush();
@@ -132,8 +127,7 @@ public class BaseSSLAndKerberosTest extends BaseSecurityTest {
         hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "");
         hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, HiveHook.class.getName());
         hiveConf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
-        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE,
-                System.getProperty("user.dir") + "/target/atlas");
+        hiveConf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, System.getProperty("user.dir") + "/target/atlas");
         hiveConf.set(HiveMetaStoreBridge.DGI_URL_PROPERTY, DGI_URL);
         hiveConf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:./target/metastore_db;create=true");
         hiveConf.set("hive.hook.dgi.synchronous", "true");

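The credential-store setup consolidated in this hunk is a create-then-flush sequence: one alias per secret, then a flush to persist the keystore. A trimmed sketch of that sequence using only calls visible above, with a single shared password array for brevity (the test allocates one array per alias):

    // All four aliases carry the literal password "keypass" in the test.
    CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
    char[] pass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
    provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, pass);
    provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, pass);
    provider.createCredentialEntry("ssl.client.truststore.password", pass);
    provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, pass);
    provider.flush(); // write out so that it can be found in checks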
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
index 34cbc78..95f4eeb 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/HiveHookIT.java
@@ -121,8 +121,8 @@ public class HiveHookIT {
 
     private String createTable(boolean partition) throws Exception {
         String tableName = tableName();
-        runCommand("create table " + tableName + "(id int, name string) comment 'table comment' "
-                + (partition ? " partitioned by(dt string)" : ""));
+        runCommand("create table " + tableName + "(id int, name string) comment 'table comment' " + (partition ?
+                " partitioned by(dt string)" : ""));
         return tableName;
     }
 
@@ -146,7 +146,7 @@ public class HiveHookIT {
 
         final Id sdId = (Id) tableRef.get("sd");
         Referenceable sdRef = dgiCLient.getEntity(sdId.id);
-        Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS),false);
+        Assert.assertEquals(sdRef.get(HiveDataModelGenerator.STORAGE_IS_STORED_AS_SUB_DIRS), false);
 
         //Create table where database doesn't exist, will create database instance as well
         assertDatabaseIsRegistered(DEFAULT_DB);
@@ -154,7 +154,8 @@ public class HiveHookIT {
 
     private String assertColumnIsRegistered(String colName) throws Exception {
         LOG.debug("Searching for column {}", colName);
-        String query = String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
+        String query =
+                String.format("%s where name = '%s'", HiveDataTypes.HIVE_COLUMN.getName(), colName.toLowerCase());
         return assertEntityIsRegistered(query, true);
 
     }
@@ -196,8 +197,9 @@ public class HiveHookIT {
     public void testInsert() throws Exception {
         String tableName = createTable();
         String insertTableName = createTable();
-        String query = "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from "
-                + tableName + " where dt = '2015-01-01'";
+        String query =
+                "insert into " + insertTableName + " partition(dt = '2015-01-01') select id, name from " + tableName
+                        + " where dt = '2015-01-01'";
 
         runCommand(query);
         assertProcessIsRegistered(query);
@@ -278,13 +280,14 @@ public class HiveHookIT {
     }
 
     private void assertProcessIsRegistered(String queryStr) throws Exception {
-//        String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
-//                normalize(queryStr));
-//        assertEntityIsRegistered(dslQuery, true);
+        //        String dslQuery = String.format("%s where queryText = \"%s\"", HiveDataTypes.HIVE_PROCESS.getName(),
+        //                normalize(queryStr));
+        //        assertEntityIsRegistered(dslQuery, true);
         //todo replace with DSL
         String typeName = HiveDataTypes.HIVE_PROCESS.getName();
-        String gremlinQuery = String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
-                typeName, typeName, normalize(queryStr));
+        String gremlinQuery =
+                String.format("g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()", typeName, typeName,
+                        normalize(queryStr));
         JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
         JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
         Assert.assertEquals(results.length(), 1);
@@ -307,9 +310,9 @@ public class HiveHookIT {
 
     private String assertTableIsRegistered(String dbName, String tableName, boolean registered) throws Exception {
         LOG.debug("Searching for table {}.{}", dbName, tableName);
-        String query = String.format("%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'"
-                + " select t", HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(),
-                CLUSTER_NAME);
+        String query = String.format(
+                "%s as t where tableName = '%s', db where name = '%s' and clusterName = '%s'" + " select t",
+                HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), CLUSTER_NAME);
         return assertEntityIsRegistered(query, registered);
     }
 
@@ -336,7 +339,7 @@ public class HiveHookIT {
         Assert.assertEquals(results.length(), 1);
     }
 
-    private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception{
+    private String assertEntityIsRegistered(String dslQuery, boolean registered) throws Exception {
         JSONArray results = dgiCLient.searchByDSL(dslQuery);
         if (registered) {
             Assert.assertEquals(results.length(), 1);

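The Gremlin fallback in assertProcessIsRegistered() (kept until the DSL replacement flagged in the //todo lands) matches the process vertex by type name and normalized query text, then expects exactly one result. A condensed sketch using only the calls visible in the hunk:

    // Find the hive_process vertex whose queryText matches, then assert a single hit.
    String typeName = HiveDataTypes.HIVE_PROCESS.getName();
    String gremlinQuery = String.format(
            "g.V.has('__typeName', '%s').has('%s.queryText', \"%s\").toList()",
            typeName, typeName, normalize(queryStr));
    JSONObject response = dgiCLient.searchByGremlin(gremlinQuery);
    JSONArray results = response.getJSONArray(AtlasClient.RESULTS);
    Assert.assertEquals(results.length(), 1);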
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
index 6caa4fc..629a9f3 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/NegativeSSLAndKerberosHiveHookIT.java
@@ -92,7 +92,8 @@ public class NegativeSSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
         configuration.setProperty("atlas.http.authentication.type", "kerberos");
-        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
+                SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
 
         configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
 

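The rewrapped setProperty call is the functional heart of this hunk: it relaxes hostname verification to DEFAULT_AND_LOCALHOST so the test's localhost endpoint passes SSL checks before the client configuration is written out. A minimal sketch of that sequence, assuming configuration and persistDir are already set up earlier in the test:

    // Accept default-valid and localhost hostnames, then persist the client config
    // where the hook's client will pick it up.
    configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
            SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
    configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));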
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
index 3eb7bba..16f93d6 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLAndKerberosHiveHookIT.java
@@ -18,8 +18,8 @@
 
 package org.apache.atlas.hive.hook;
 
-import org.apache.atlas.AtlasException;
 import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasException;
 import org.apache.atlas.PropertiesUtil;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.security.SecurityProperties;
@@ -107,7 +107,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
         configuration.setProperty("atlas.http.authentication.type", "kerberos");
-        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
+                SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
 
         configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
 
@@ -215,7 +216,8 @@ public class SSLAndKerberosHiveHookIT extends BaseSSLAndKerberosTest {
         assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
     }
 
-    private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue) throws Exception {
+    private void assertInstanceIsRegistered(final String typeName, final String colName, final String colValue)
+    throws Exception {
         Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
             @Override
             public Object run() throws Exception {

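The hunk above cuts off inside the Subject.doAs block, so the assertion body is not shown. A sketch of the likely full shape, with the body borrowed from the non-kerberized variant in SSLHiveHookIT below (the rawSearch call there is an assumption here, not confirmed by this hunk):

    // Run the search as the authenticated subject so the request to the
    // secured server carries the Kerberos credentials.
    private void assertInstanceIsRegistered(final String typeName, final String colName,
            final String colValue) throws Exception {
        Subject.doAs(subject, new PrivilegedExceptionAction<Object>() {
            @Override
            public Object run() throws Exception {
                JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
                Assert.assertEquals(results.length(), 1);
                return null;
            }
        });
    }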
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/fef50cee/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
----------------------------------------------------------------------
diff --git a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
index 849cf1b..daf7e08 100755
--- a/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
+++ b/addons/hive-bridge/src/test/java/org/apache/atlas/hive/hook/SSLHiveHookIT.java
@@ -18,8 +18,8 @@
 
 package org.apache.atlas.hive.hook;
 
-import org.apache.atlas.AtlasException;
 import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasException;
 import org.apache.atlas.hive.bridge.HiveMetaStoreBridge;
 import org.apache.atlas.hive.model.HiveDataTypes;
 import org.apache.atlas.security.SecurityProperties;
@@ -73,7 +73,9 @@ public class SSLHiveHookIT {
             super(port, path);
         }
 
-        public Server getServer () { return server; }
+        public Server getServer() {
+            return server;
+        }
 
         @Override
         public PropertiesConfiguration getConfiguration() {
@@ -113,7 +115,8 @@ public class SSLHiveHookIT {
         configuration.setProperty(TRUSTSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(KEYSTORE_FILE_KEY, "../../webapp/target/atlas.keystore");
         configuration.setProperty(CERT_STORES_CREDENTIAL_PROVIDER_PATH, providerUrl);
-        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY, SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
+        configuration.setProperty(SSLFactory.SSL_HOSTNAME_VERIFIER_KEY,
+                SSLHostnameVerifier.DEFAULT_AND_LOCALHOST.toString());
 
         configuration.save(new FileWriter(persistDir + File.separator + "client.properties"));
 
@@ -153,27 +156,22 @@ public class SSLHiveHookIT {
         file.delete();
         conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerUrl);
 
-        CredentialProvider provider =
-                CredentialProviderFactory.getProviders(conf).get(0);
+        CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
 
         // create new aliases
         try {
 
             char[] storepass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    KEYSTORE_PASSWORD_KEY, storepass);
+            provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, storepass);
 
             char[] trustpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    TRUSTSTORE_PASSWORD_KEY, trustpass);
+            provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, trustpass);
 
             char[] trustpass2 = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    "ssl.client.truststore.password", trustpass2);
+            provider.createCredentialEntry("ssl.client.truststore.password", trustpass2);
 
             char[] certpass = {'k', 'e', 'y', 'p', 'a', 's', 's'};
-            provider.createCredentialEntry(
-                    SERVER_CERT_PASSWORD_KEY, certpass);
+            provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, certpass);
 
             // write out so that it can be found in checks
             provider.flush();
@@ -217,7 +215,7 @@ public class SSLHiveHookIT {
         assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);
     }
 
-    private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception{
+    private void assertInstanceIsRegistered(String typeName, String colName, String colValue) throws Exception {
         JSONArray results = dgiCLient.rawSearch(typeName, colName, colValue);
         Assert.assertEquals(results.length(), 1);
     }

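After the cleanup, the non-kerberized assertion is a two-line check; its usage is visible earlier in the same file's hunks:

    // As invoked above once the database is created:
    assertInstanceIsRegistered(HiveDataTypes.HIVE_DB.getName(), "name", dbName);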
