atlas-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jma...@apache.org
Subject [28/51] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date Sun, 14 Jun 2015 17:45:08 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
new file mode 100755
index 0000000..642a8eb
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/graph/GraphBackedMetadataRepositoryTest.java
@@ -0,0 +1,676 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.graph;
+
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Compare;
+import com.tinkerpop.blueprints.GraphQuery;
+import com.tinkerpop.blueprints.Vertex;
+import org.apache.atlas.RepositoryMetadataModule;
+import org.apache.atlas.TestUtils;
+import org.apache.atlas.discovery.graph.GraphBackedDiscoveryService;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.repository.Constants;
+import org.apache.atlas.repository.EntityNotFoundException;
+import org.apache.atlas.repository.RepositoryException;
+import org.apache.atlas.typesystem.IStruct;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.ITypedStruct;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.apache.commons.lang.RandomStringUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+import scala.actors.threadpool.Arrays;
+
+import javax.inject.Inject;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * GraphBackedMetadataRepository test
+ *
+ * Guice loads the dependencies and injects the necessary objects
+ *
+ */
+@Guice(modules = RepositoryMetadataModule.class)
+public class GraphBackedMetadataRepositoryTest {
+
+    private static final String ENTITY_TYPE = "Department";
+    private static final String DATABASE_TYPE = "hive_database";
+    private static final String DATABASE_NAME = "foo";
+    private static final String TABLE_TYPE = "hive_table";
+    private static final String TABLE_NAME = "bar";
+    private static final String CLASSIFICATION = "classification";
+    private static final String PII = "PII";
+    private static final String SUPER_TYPE_NAME = "Base";
+
+    @Inject
+    private GraphProvider<TitanGraph> graphProvider;
+
+    @Inject
+    private GraphBackedMetadataRepository repositoryService;
+
+    @Inject
+    private GraphBackedDiscoveryService discoveryService;
+
+    private TypeSystem typeSystem;
+    private String guid;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        typeSystem = TypeSystem.getInstance();
+        typeSystem.reset();
+
+        new GraphBackedSearchIndexer(graphProvider);
+
+        TestUtils.defineDeptEmployeeTypes(typeSystem);
+        createHiveTypes();
+    }
+
+/*
+    @AfterMethod
+    public void tearDown() throws Exception {
+         TestUtils.dumpGraph(graphProvider.get());
+    }
+*/
+
+    @Test
+    public void testSubmitEntity() throws Exception {
+        Referenceable hrDept = TestUtils.createDeptEg1(typeSystem);
+        ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
+        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
+
+        guid = repositoryService.createEntity(hrDept2);
+        Assert.assertNotNull(guid);
+    }
+
+    @Test(dependsOnMethods = "testSubmitEntity")
+    public void testGetEntityDefinitionForDepartment() throws Exception {
+        ITypedReferenceableInstance entity = repositoryService.getEntityDefinition(guid);
+        Assert.assertNotNull(entity);
+    }
+
+    @Test (expectedExceptions = RepositoryException.class)
+    public void testGetEntityDefinitionNonExistent() throws Exception {
+        repositoryService.getEntityDefinition("blah");
+        Assert.fail();
+    }
+
+    @Test
+    public void testGetEntityList() throws Exception {
+        List<String> entityList = repositoryService.getEntityList(ENTITY_TYPE);
+        System.out.println("entityList = " + entityList);
+        Assert.assertNotNull(entityList);
+        Assert.assertEquals(entityList.size(), 1); // one department
+    }
+
+    @Test
+    public void testGetTypeAttributeName() throws Exception {
+        Assert.assertEquals(
+                repositoryService.getTypeAttributeName(), Constants.ENTITY_TYPE_PROPERTY_KEY);
+    }
+
+    @Test (dependsOnMethods = "testSubmitEntity")
+    public void testGetTraitLabel() throws Exception {
+        Assert.assertEquals(repositoryService.getTraitLabel(
+                typeSystem.getDataType(ClassType.class, TABLE_TYPE),
+                CLASSIFICATION), TABLE_TYPE + "." + CLASSIFICATION);
+    }
+
+    @Test
+    public void testCreateEntity() throws Exception {
+        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+        databaseInstance.set("name", DATABASE_NAME);
+        databaseInstance.set("description", "foo database");
+        databaseInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
+
+        databaseInstance.set("namespace", "colo:cluster:hive:db");
+        databaseInstance.set("cluster", "cluster-1");
+        databaseInstance.set("colo", "colo-1");
+        System.out.println("databaseInstance = " + databaseInstance);
+
+        ClassType dbType = typeSystem.getDataType(ClassType.class, DATABASE_TYPE);
+        ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
+        System.out.println("db = " + db);
+
+        String dbGUID = repositoryService.createEntity(db);
+        System.out.println("added db = " + dbGUID);
+
+        Referenceable dbInstance = new Referenceable(
+                dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
+
+        ITypedReferenceableInstance table = createHiveTableInstance(dbInstance);
+        String tableGUID = repositoryService.createEntity(table);
+        System.out.println("added table = " + tableGUID);
+    }
+
+    @Test(dependsOnMethods = "testCreateEntity")
+    public void testGetEntityDefinition() throws Exception {
+        String guid = getGUID();
+
+        ITypedReferenceableInstance table = repositoryService.getEntityDefinition(guid);
+        Assert.assertEquals(table.getDate("created"), new Date(BaseTest.TEST_DATE_IN_LONG));
+        System.out.println("*** table = " + table);
+    }
+
+    private String getGUID() {
+        Vertex tableVertex = getTableEntityVertex();
+
+        String guid = tableVertex.getProperty(Constants.GUID_PROPERTY_KEY);
+        if (guid == null) {
+            Assert.fail();
+        }
+        return guid;
+    }
+
+    private Vertex getTableEntityVertex() {
+        TitanGraph graph = graphProvider.get();
+        GraphQuery query = graph.query()
+                .has(Constants.ENTITY_TYPE_PROPERTY_KEY, Compare.EQUAL, TABLE_TYPE);
+        Iterator<Vertex> results = query.vertices().iterator();
+        // returning one since guid should be unique
+        Vertex tableVertex = results.hasNext() ? results.next() : null;
+        if (tableVertex == null) {
+            Assert.fail();
+        }
+
+        return tableVertex;
+    }
+
+    @Test (dependsOnMethods = "testCreateEntity")
+    public void testGetTraitNames() throws Exception {
+        final List<String> traitNames = repositoryService.getTraitNames(getGUID());
+        Assert.assertEquals(traitNames.size(), 1);
+        Assert.assertEquals(traitNames, Arrays.asList(new String[]{CLASSIFICATION}));
+    }
+
+    @Test
+    public void testGetTraitNamesForEmptyTraits() throws Exception {
+        final List<String> traitNames = repositoryService.getTraitNames(guid);
+        Assert.assertEquals(traitNames.size(), 0);
+    }
+
+    @Test (expectedExceptions = EntityNotFoundException.class)
+    public void testGetTraitNamesForBadEntity() throws Exception {
+        repositoryService.getTraitNames(UUID.randomUUID().toString());
+        Assert.fail();
+    }
+
+    @Test (dependsOnMethods = "testGetTraitNames")
+    public void testAddTrait() throws Exception {
+        final String aGUID = getGUID();
+
+        List<String> traitNames = repositoryService.getTraitNames(aGUID);
+        System.out.println("traitNames = " + traitNames);
+        Assert.assertEquals(traitNames.size(), 1);
+        Assert.assertTrue(traitNames.contains(CLASSIFICATION));
+        Assert.assertFalse(traitNames.contains(PII));
+
+        HierarchicalTypeDefinition<TraitType> piiTrait =
+                TypesUtil.createTraitTypeDef(PII, ImmutableList.<String>of());
+        TraitType traitType = typeSystem.defineTraitType(piiTrait);
+        ITypedStruct traitInstance = traitType.createInstance();
+
+        repositoryService.addTrait(aGUID, traitInstance);
+
+        // refresh trait names
+        traitNames = repositoryService.getTraitNames(aGUID);
+        Assert.assertEquals(traitNames.size(), 2);
+        Assert.assertTrue(traitNames.contains(PII));
+        Assert.assertTrue(traitNames.contains(CLASSIFICATION));
+    }
+
+    @Test (dependsOnMethods = "testAddTrait")
+    public void testAddTraitWithAttribute() throws Exception {
+        final String aGUID = getGUID();
+        final String traitName = "P_I_I";
+
+        HierarchicalTypeDefinition<TraitType> piiTrait =
+                TypesUtil.createTraitTypeDef(traitName, ImmutableList.<String>of(),
+                TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
+        TraitType traitType = typeSystem.defineTraitType(piiTrait);
+        ITypedStruct traitInstance = traitType.createInstance();
+        traitInstance.set("type", "SSN");
+
+        repositoryService.addTrait(aGUID, traitInstance);
+
+        TestUtils.dumpGraph(graphProvider.get());
+
+        // refresh trait names
+        List<String> traitNames = repositoryService.getTraitNames(aGUID);
+        Assert.assertEquals(traitNames.size(), 3);
+        Assert.assertTrue(traitNames.contains(traitName));
+
+        ITypedReferenceableInstance instance = repositoryService.getEntityDefinition(aGUID);
+        IStruct traitInstanceRef = instance.getTrait(traitName);
+        String type = (String) traitInstanceRef.get("type");
+        Assert.assertEquals(type, "SSN");
+    }
+
+    @Test (expectedExceptions = NullPointerException.class)
+    public void testAddTraitWithNullInstance() throws Exception {
+        repositoryService.addTrait(getGUID(), null);
+        Assert.fail();
+    }
+
+    @Test (dependsOnMethods = "testAddTrait", expectedExceptions = RepositoryException.class)
+    public void testAddTraitForBadEntity() throws Exception {
+        TraitType traitType = typeSystem.getDataType(TraitType.class, PII);
+        ITypedStruct traitInstance = traitType.createInstance();
+
+        repositoryService.addTrait(UUID.randomUUID().toString(), traitInstance);
+        Assert.fail();
+    }
+
+    @Test (dependsOnMethods = "testAddTrait")
+    public void testDeleteTrait() throws Exception {
+        final String aGUID = getGUID();
+
+        List<String> traitNames = repositoryService.getTraitNames(aGUID);
+        Assert.assertEquals(traitNames.size(), 3);
+        Assert.assertTrue(traitNames.contains(PII));
+        Assert.assertTrue(traitNames.contains(CLASSIFICATION));
+        Assert.assertTrue(traitNames.contains("P_I_I"));
+
+        repositoryService.deleteTrait(aGUID, PII);
+
+        // refresh trait names
+        traitNames = repositoryService.getTraitNames(aGUID);
+        Assert.assertEquals(traitNames.size(), 2);
+        Assert.assertTrue(traitNames.contains(CLASSIFICATION));
+        Assert.assertFalse(traitNames.contains(PII));
+    }
+
+    @Test (expectedExceptions = RepositoryException.class)
+    public void testDeleteTraitForNonExistentEntity() throws Exception {
+        repositoryService.deleteTrait(UUID.randomUUID().toString(), PII);
+        Assert.fail();
+    }
+
+    @Test (expectedExceptions = RepositoryException.class)
+    public void testDeleteTraitForNonExistentTrait() throws Exception {
+        final String aGUID = getGUID();
+        repositoryService.deleteTrait(aGUID, "PCI");
+        Assert.fail();
+    }
+
+    @Test (dependsOnMethods = "testCreateEntity")
+    public void testGetIdFromVertex() throws Exception {
+        Vertex tableVertex = getTableEntityVertex();
+
+        String guid = tableVertex.getProperty(Constants.GUID_PROPERTY_KEY);
+        if (guid == null) {
+            Assert.fail();
+        }
+
+        Id expected = new Id(guid,
+                tableVertex.<Integer>getProperty(Constants.VERSION_PROPERTY_KEY), TABLE_TYPE);
+        Assert.assertEquals(repositoryService.getIdFromVertex(TABLE_TYPE, tableVertex), expected);
+    }
+
+    @Test (dependsOnMethods = "testCreateEntity")
+    public void testGetTypeName() throws Exception {
+        Vertex tableVertex = getTableEntityVertex();
+        Assert.assertEquals(repositoryService.getTypeName(tableVertex), TABLE_TYPE);
+    }
+
+    @Test(dependsOnMethods = "testCreateEntity")
+    public void testSearchByDSLQuery() throws Exception {
+        String dslQuery = "hive_database as PII";
+        System.out.println("Executing dslQuery = " + dslQuery);
+        String jsonResults = discoveryService.searchByDSL(dslQuery);
+        Assert.assertNotNull(jsonResults);
+
+        JSONObject results = new JSONObject(jsonResults);
+        Assert.assertEquals(results.length(), 3);
+        System.out.println("results = " + results);
+
+        Object query = results.get("query");
+        Assert.assertNotNull(query);
+
+        JSONObject dataType = results.getJSONObject("dataType");
+        Assert.assertNotNull(dataType);
+        String typeName = dataType.getString("typeName");
+        Assert.assertNotNull(typeName);
+
+        JSONArray rows = results.getJSONArray("rows");
+        Assert.assertNotNull(rows);
+        Assert.assertTrue(rows.length() > 0);
+
+        for (int index = 0; index < rows.length(); index++) {
+            JSONObject row = rows.getJSONObject(index);
+            String type = row.getString("$typeName$");
+            Assert.assertEquals(type, "hive_database");
+
+            String name = row.getString("name");
+            Assert.assertEquals(name, DATABASE_NAME);
+        }
+    }
+
+    @Test(dependsOnMethods = "testSubmitEntity")
+    public void testSearchByDSLWithInheritance() throws Exception {
+        String dslQuery = "Person where name = 'Jane'";
+        System.out.println("Executing dslQuery = " + dslQuery);
+        String jsonResults = discoveryService.searchByDSL(dslQuery);
+        Assert.assertNotNull(jsonResults);
+
+        JSONObject results = new JSONObject(jsonResults);
+        Assert.assertEquals(results.length(), 3);
+        System.out.println("results = " + results);
+
+        Object query = results.get("query");
+        Assert.assertNotNull(query);
+
+        JSONObject dataType = results.getJSONObject("dataType");
+        Assert.assertNotNull(dataType);
+        String typeName = dataType.getString("typeName");
+        Assert.assertEquals(typeName, "Person");
+
+        JSONArray rows = results.getJSONArray("rows");
+        Assert.assertEquals(rows.length(), 1);
+
+        JSONObject row = rows.getJSONObject(0);
+        Assert.assertEquals(row.getString("$typeName$"), "Manager");
+        Assert.assertEquals(row.getString("name"), "Jane");
+    }
+
+    @Test(dependsOnMethods = "testCreateEntity")
+    public void testBug37860() throws Exception {
+        String dslQuery =
+                "hive_table as t where name = 'bar' " +
+                        "database where name = 'foo' and description = 'foo database' select t";
+        System.out.println("Executing dslQuery = " + dslQuery);
+        String jsonResults = discoveryService.searchByDSL(dslQuery);
+        Assert.assertNotNull(jsonResults);
+
+        JSONObject results = new JSONObject(jsonResults);
+        Assert.assertEquals(results.length(), 3);
+        System.out.println("results = " + results);
+
+        Object query = results.get("query");
+        Assert.assertNotNull(query);
+
+        JSONObject dataType = results.getJSONObject("dataType");
+        Assert.assertNotNull(dataType);
+
+        JSONArray rows = results.getJSONArray("rows");
+        Assert.assertEquals(rows.length(), 1);
+
+    }
+
+    /**
+      * Full text search requires GraphBackedSearchIndexer, and GraphBackedSearchIndexer can't be enabled in
+      * GraphBackedDiscoveryServiceTest because of its test data. So, test for full text search is in
+      * GraphBackedMetadataRepositoryTest:(
+      */
+    @Test(dependsOnMethods = "testSubmitEntity")
+    public void testFullTextSearch() throws Exception {
+        //todo fix this
+        //Weird: with lucene, the test passes without sleep
+        //but with elasticsearch, doesn't work without sleep. why??
+        long sleepInterval = 1000;
+
+        //person in hr department whose name is john
+        Thread.sleep(sleepInterval);
+        String response = discoveryService.searchByFullText("john");
+        Assert.assertNotNull(response);
+        JSONArray results = new JSONArray(response);
+        Assert.assertEquals(results.length(), 1);
+        JSONObject row = (JSONObject) results.get(0);
+        Assert.assertEquals(row.get("typeName"), "Person");
+
+        //person in hr department who lives in santa clara
+        response = discoveryService.searchByFullText("Jane AND santa AND clara");
+        Assert.assertNotNull(response);
+        results = new JSONArray(response);
+        Assert.assertEquals(results.length(), 1);
+        row = (JSONObject) results.get(0);
+        Assert.assertEquals(row.get("typeName"), "Manager");
+
+        //search for person in hr department whose name starts is john/jahn
+        response = discoveryService.searchByFullText("hr AND (john OR jahn)");
+        Assert.assertNotNull(response);
+        results = new JSONArray(response);
+        Assert.assertEquals(results.length(), 1);
+        row = (JSONObject) results.get(0);
+        Assert.assertEquals(row.get("typeName"), "Person");
+    }
+
+    private void createHiveTypes() throws Exception {
+        HierarchicalTypeDefinition<ClassType> superTypeDefinition =
+                TypesUtil.createClassTypeDef(SUPER_TYPE_NAME,
+                        ImmutableList.<String>of(),
+                        TypesUtil.createOptionalAttrDef("namespace", DataTypes.STRING_TYPE),
+                        TypesUtil.createOptionalAttrDef("cluster", DataTypes.STRING_TYPE),
+                        TypesUtil.createOptionalAttrDef("colo", DataTypes.STRING_TYPE));
+
+        HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
+                TypesUtil.createClassTypeDef(DATABASE_TYPE,
+                        ImmutableList.of(SUPER_TYPE_NAME),
+                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
+                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+
+
+        StructTypeDefinition structTypeDefinition =
+                new StructTypeDefinition("serdeType",
+                        new AttributeDefinition[]{
+                                TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                                TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
+                        });
+
+        EnumValue values[] = {
+                new EnumValue("MANAGED", 1),
+                new EnumValue("EXTERNAL", 2),
+        };
+
+        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("tableType", values);
+        typeSystem.defineEnumType(enumTypeDefinition);
+
+        HierarchicalTypeDefinition<ClassType> columnsDefinition =
+                TypesUtil.createClassTypeDef("column_type",
+                        ImmutableList.<String>of(),
+                        TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
+
+        StructTypeDefinition partitionDefinition =
+                new StructTypeDefinition("partition_type",
+                        new AttributeDefinition[]{
+                                TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        });
+
+        HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
+                TypesUtil.createClassTypeDef(TABLE_TYPE,
+                        ImmutableList.of(SUPER_TYPE_NAME),
+                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                        TypesUtil.createOptionalAttrDef("created", DataTypes.DATE_TYPE),
+                        // enum
+                        new AttributeDefinition("tableType", "tableType",
+                                Multiplicity.REQUIRED, false, null),
+                        // array of strings
+                        new AttributeDefinition("columnNames",
+                                String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
+                                Multiplicity.COLLECTION, false, null),
+                        // array of classes
+                        new AttributeDefinition("columns",
+                                String.format("array<%s>", "column_type"),
+                                Multiplicity.COLLECTION, true, null),
+                        // array of structs
+                        new AttributeDefinition("partitions",
+                                String.format("array<%s>", "partition_type"),
+                                Multiplicity.COLLECTION, true, null),
+                        // map of primitives
+                        new AttributeDefinition("parametersMap",
+                                DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
+                                        DataTypes.STRING_TYPE.getName()),
+                                Multiplicity.COLLECTION, true, null),
+                        // map of classes - todo - enable this
+//                        new AttributeDefinition("columnsMap",
+//                                DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
+//                                        "column_type"),
+//                                Multiplicity.COLLECTION, true, null),
+                        // map of structs   todo - enable this
+//                        new AttributeDefinition("partitionsMap",
+//                                DataTypes.mapTypeName(DataTypes.STRING_TYPE.getName(),
+//                                        "partition_type"),
+//                                Multiplicity.COLLECTION, true, null),
+                        // struct reference
+                        new AttributeDefinition("serde1",
+                                "serdeType", Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("serde2",
+                                "serdeType", Multiplicity.REQUIRED, false, null),
+                        // class reference
+                        new AttributeDefinition("database",
+                                DATABASE_TYPE, Multiplicity.REQUIRED, true, null)
+                );
+
+        HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
+                TypesUtil.createTraitTypeDef(CLASSIFICATION,
+                        ImmutableList.<String>of(),
+                        TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
+
+        HierarchicalTypeDefinition<TraitType> fetlClassificationTypeDefinition =
+                TypesUtil.createTraitTypeDef("fetl" + CLASSIFICATION,
+                        ImmutableList.of(CLASSIFICATION),
+                        TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE));
+
+        typeSystem.defineTypes(
+                ImmutableList.of(structTypeDefinition, partitionDefinition),
+                ImmutableList.of(classificationTypeDefinition, fetlClassificationTypeDefinition),
+                ImmutableList.of(superTypeDefinition, databaseTypeDefinition,
+                        columnsDefinition, tableTypeDefinition));
+    }
+
+    private ITypedReferenceableInstance createHiveTableInstance(
+            Referenceable databaseInstance) throws Exception {
+        Referenceable tableInstance = new Referenceable(TABLE_TYPE, CLASSIFICATION);
+        tableInstance.set("name", TABLE_NAME);
+        tableInstance.set("description", "bar table");
+        tableInstance.set("type", "managed");
+        tableInstance.set("created", new Date(BaseTest.TEST_DATE_IN_LONG));
+        tableInstance.set("tableType", 1); // enum
+
+        // super type
+        tableInstance.set("namespace", "colo:cluster:hive:db:table");
+        tableInstance.set("cluster", "cluster-1");
+        tableInstance.set("colo", "colo-1");
+
+        // refer to an existing class
+        tableInstance.set("database", databaseInstance);
+
+        ArrayList<String> columnNames = new ArrayList<>();
+        columnNames.add("first_name");
+        columnNames.add("last_name");
+        tableInstance.set("columnNames", columnNames);
+
+        Struct traitInstance = (Struct) tableInstance.getTrait(CLASSIFICATION);
+        traitInstance.set("tag", "foundation_etl");
+
+        Struct serde1Instance = new Struct("serdeType");
+        serde1Instance.set("name", "serde1");
+        serde1Instance.set("serde", "serde1");
+        tableInstance.set("serde1", serde1Instance);
+
+        Struct serde2Instance = new Struct("serdeType");
+        serde2Instance.set("name", "serde2");
+        serde2Instance.set("serde", "serde2");
+        tableInstance.set("serde2", serde2Instance);
+
+        // HashMap<String, Referenceable> columnsMap = new HashMap<>();
+        ArrayList<Referenceable> columns = new ArrayList<>();
+        for (int index = 0; index < 5; index++) {
+            Referenceable columnInstance = new Referenceable("column_type");
+            final String name = "column_" + index;
+            columnInstance.set("name", name);
+            columnInstance.set("type", "string");
+
+            columns.add(columnInstance);
+            // columnsMap.put(name, columnInstance);
+        }
+        tableInstance.set("columns", columns);
+        // tableInstance.set("columnsMap", columnsMap);
+
+//        HashMap<String, Struct> partitionsMap = new HashMap<>();
+        ArrayList<Struct> partitions = new ArrayList<>();
+        for (int index = 0; index < 5; index++) {
+            Struct partitionInstance = new Struct("partition_type");
+            final String name = "partition_" + index;
+            partitionInstance.set("name", name);
+
+            partitions.add(partitionInstance);
+//            partitionsMap.put(name, partitionInstance);
+        }
+        tableInstance.set("partitions", partitions);
+//        tableInstance.set("partitionsMap", partitionsMap);
+
+        HashMap<String, String> parametersMap = new HashMap<>();
+        parametersMap.put("foo", "bar");
+        parametersMap.put("bar", "baz");
+        parametersMap.put("some", "thing");
+        tableInstance.set("parametersMap", parametersMap);
+
+        ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
+        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
+    }
+
+    private String random() {
+        return RandomStringUtils.random(10);
+    }
+
+    @Test
+    public void testUTFValues() throws Exception {
+        Referenceable hrDept = new Referenceable("Department");
+        Referenceable john = new Referenceable("Person");
+        john.set("name", random());
+        john.set("department", hrDept);
+
+        hrDept.set("name", random());
+        hrDept.set("employees", ImmutableList.of(john));
+
+        ClassType deptType = typeSystem.getDataType(ClassType.class, "Department");
+        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
+
+        guid = repositoryService.createEntity(hrDept2);
+        Assert.assertNotNull(guid);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java b/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
new file mode 100755
index 0000000..3f0782b
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/graph/GraphRepoMapperScaleTest.java
@@ -0,0 +1,272 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.graph;
+
+import com.google.common.collect.ImmutableList;
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.thinkaurelius.titan.core.TitanIndexQuery;
+import com.tinkerpop.blueprints.Compare;
+import com.tinkerpop.blueprints.GraphQuery;
+import com.tinkerpop.blueprints.Vertex;
+import org.apache.atlas.RepositoryMetadataModule;
+import org.apache.atlas.repository.Constants;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumType;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.IDataType;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import javax.inject.Inject;
+import java.util.ArrayList;
+import java.util.Map;
+
+@Test
+@Guice(modules = RepositoryMetadataModule.class)
+public class GraphRepoMapperScaleTest {
+
+    private static final String DATABASE_TYPE = "hive_database_type";
+    private static final String DATABASE_NAME = "foo";
+    private static final String TABLE_TYPE = "hive_table_type";
+    private static final String TABLE_NAME = "bar";
+
+    @Inject
+    private GraphProvider<TitanGraph> graphProvider;
+    @Inject
+    private GraphBackedMetadataRepository repositoryService;
+
+    private GraphBackedSearchIndexer searchIndexer;
+    private TypeSystem typeSystem;
+    private String dbGUID;
+
+    @BeforeClass
+    public void setUp() throws Exception {
+        searchIndexer = new GraphBackedSearchIndexer(graphProvider);
+
+        typeSystem = TypeSystem.getInstance();
+
+        createHiveTypes();
+    }
+
+    @Test
+    public void testSubmitEntity() throws Exception {
+        Referenceable databaseInstance = new Referenceable(DATABASE_TYPE);
+        databaseInstance.set("name", DATABASE_NAME);
+        databaseInstance.set("description", "foo database");
+        // System.out.println("databaseInstance = " + databaseInstance);
+
+        ClassType dbType = typeSystem.getDataType(ClassType.class, DATABASE_TYPE);
+        ITypedReferenceableInstance db = dbType.convert(databaseInstance, Multiplicity.REQUIRED);
+
+        dbGUID = repositoryService.createEntity(db);
+
+        Referenceable dbInstance = new Referenceable(
+                dbGUID, DATABASE_TYPE, databaseInstance.getValuesMap());
+
+        for (int index = 0; index < 1000; index++) {
+            ITypedReferenceableInstance table = createHiveTableInstance(dbInstance, index);
+            repositoryService.createEntity(table);
+        }
+    }
+
+    @Test(dependsOnMethods = "testSubmitEntity")
+    public void testSearchIndex() throws Exception {
+        searchWithOutIndex(Constants.GUID_PROPERTY_KEY, dbGUID);
+        searchWithOutIndex(Constants.ENTITY_TYPE_PROPERTY_KEY, "hive_column_type");
+        searchWithOutIndex(Constants.ENTITY_TYPE_PROPERTY_KEY, TABLE_TYPE);
+
+        searchWithOutIndex("hive_table_type.name", "bar-999");
+        searchWithIndex("hive_table_type.name", "bar-999");
+
+        for (int index = 500; index < 600; index++) {
+            searchWithIndex("hive_table_type.name", "bar-" + index);
+        }
+    }
+
+    private void searchWithOutIndex(String key, String value) {
+        TitanGraph graph = graphProvider.get();
+        long start = System.currentTimeMillis();
+        int count = 0;
+        try {
+            GraphQuery query = graph.query()
+                    .has(key, Compare.EQUAL, value);
+            for (Vertex ignored : query.vertices()) {
+                count++;
+            }
+        } finally {
+            System.out.println("Search on [" + key + "=" + value + "] returned results: " + count
+                    + ", took " + (System.currentTimeMillis() - start) + " ms");
+        }
+    }
+
+    private void searchWithIndex(String key, String value) {
+        TitanGraph graph = graphProvider.get();
+        long start = System.currentTimeMillis();
+        int count = 0;
+        try {
+            String queryString = "v.\"" + key + "\":(" + value + ")";
+            TitanIndexQuery query = graph.indexQuery(Constants.VERTEX_INDEX, queryString);
+            for (TitanIndexQuery.Result<Vertex> ignored : query.vertices()) {
+                count++;
+            }
+        } finally {
+            System.out.println("Search on [" + key + "=" + value + "] returned results: " + count
+                    + ", took " + (System.currentTimeMillis() - start) + " ms");
+        }
+    }
+
+    private void createHiveTypes() throws Exception {
+        HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
+                TypesUtil.createClassTypeDef(DATABASE_TYPE,
+                        ImmutableList.<String>of(),
+                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+
+        StructTypeDefinition structTypeDefinition =
+                new StructTypeDefinition("hive_serde_type",
+                        new AttributeDefinition[]{
+                                TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                                TypesUtil.createRequiredAttrDef("serde", DataTypes.STRING_TYPE)
+                        });
+
+        EnumValue values[] = {
+                new EnumValue("MANAGED", 1),
+                new EnumValue("EXTERNAL", 2),
+        };
+
+        EnumTypeDefinition enumTypeDefinition = new EnumTypeDefinition("table_type", values);
+        EnumType enumType = typeSystem.defineEnumType(enumTypeDefinition);
+        searchIndexer.onAdd("table_type", enumType);
+
+        HierarchicalTypeDefinition<ClassType> columnsDefinition =
+                TypesUtil.createClassTypeDef("hive_column_type",
+                        ImmutableList.<String>of(),
+                        TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE));
+
+        StructTypeDefinition partitionDefinition =
+                new StructTypeDefinition("hive_partition_type",
+                        new AttributeDefinition[]{
+                                TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        });
+
+        HierarchicalTypeDefinition<ClassType> tableTypeDefinition =
+                TypesUtil.createClassTypeDef(TABLE_TYPE,
+                        ImmutableList.<String>of(),
+                        TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                        // enum
+                        new AttributeDefinition("tableType", "table_type",
+                                Multiplicity.REQUIRED, false, null),
+                        // array of strings
+                        new AttributeDefinition("columnNames",
+                                String.format("array<%s>", DataTypes.STRING_TYPE.getName()),
+                                Multiplicity.COLLECTION, false, null),
+                        // array of classes
+                        new AttributeDefinition("columns",
+                                String.format("array<%s>", "hive_column_type"),
+                                Multiplicity.COLLECTION, true, null),
+                        // array of structs
+                        new AttributeDefinition("partitions",
+                                String.format("array<%s>", "hive_partition_type"),
+                                Multiplicity.COLLECTION, true, null),
+                        // struct reference
+                        new AttributeDefinition("serde1",
+                                "hive_serde_type", Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("serde2",
+                                "hive_serde_type", Multiplicity.REQUIRED, false, null),
+                        // class reference
+                        new AttributeDefinition("database",
+                                DATABASE_TYPE, Multiplicity.REQUIRED, true, null));
+
+        HierarchicalTypeDefinition<TraitType> classificationTypeDefinition =
+                TypesUtil.createTraitTypeDef("pii_type", ImmutableList.<String>of());
+
+        Map<String, IDataType> types = typeSystem.defineTypes(
+                ImmutableList.of(structTypeDefinition, partitionDefinition),
+                ImmutableList.of(classificationTypeDefinition),
+                ImmutableList.of(databaseTypeDefinition, columnsDefinition, tableTypeDefinition));
+
+        for (Map.Entry<String, IDataType> entry : types.entrySet()) {
+            searchIndexer.onAdd(entry.getKey(), entry.getValue());
+        }
+    }
+
+    private ITypedReferenceableInstance createHiveTableInstance(
+            Referenceable databaseInstance, int uberIndex) throws Exception {
+
+        Referenceable tableInstance = new Referenceable(TABLE_TYPE, "pii_type");
+        tableInstance.set("name", TABLE_NAME + "-" + uberIndex);
+        tableInstance.set("description", "bar table" + "-" + uberIndex);
+        tableInstance.set("type", "managed");
+        tableInstance.set("tableType", 1); // enum
+
+        // refer to an existing class
+        tableInstance.set("database", databaseInstance);
+
+        ArrayList<String> columnNames = new ArrayList<>();
+        columnNames.add("first_name" + "-" + uberIndex);
+        columnNames.add("last_name" + "-" + uberIndex);
+        tableInstance.set("columnNames", columnNames);
+
+        Struct serde1Instance = new Struct("hive_serde_type");
+        serde1Instance.set("name", "serde1" + "-" + uberIndex);
+        serde1Instance.set("serde", "serde1" + "-" + uberIndex);
+        tableInstance.set("serde1", serde1Instance);
+
+        Struct serde2Instance = new Struct("hive_serde_type");
+        serde2Instance.set("name", "serde2" + "-" + uberIndex);
+        serde2Instance.set("serde", "serde2" + "-" + uberIndex);
+        tableInstance.set("serde2", serde2Instance);
+
+        ArrayList<Referenceable> columns = new ArrayList<>();
+        for (int index = 0; index < 5; index++) {
+            Referenceable columnInstance = new Referenceable("hive_column_type");
+            columnInstance.set("name", "column_" + "-" + uberIndex + "-" + index);
+            columnInstance.set("type", "string");
+            columns.add(columnInstance);
+        }
+        tableInstance.set("columns", columns);
+
+        ArrayList<Struct> partitions = new ArrayList<>();
+        for (int index = 0; index < 5; index++) {
+            Struct partitionInstance = new Struct("hive_partition_type");
+            partitionInstance.set("name", "partition_" + "-" + uberIndex + "-" + index);
+            partitions.add(partitionInstance);
+        }
+        tableInstance.set("partitions", partitions);
+
+        ClassType tableType = typeSystem.getDataType(ClassType.class, TABLE_TYPE);
+        return tableType.convert(tableInstance, Multiplicity.REQUIRED);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/ClassTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/ClassTest.java b/repository/src/test/java/org/apache/atlas/repository/memory/ClassTest.java
new file mode 100755
index 0000000..113a975
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/ClassTest.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ClassTest extends BaseTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+    }
+
+    @Test
+    public void test1() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+
+        defineDeptEmployeeTypes(ts);
+        Referenceable hrDept = createDeptEg1(ts);
+        ClassType deptType = ts.getDataType(ClassType.class, "Department");
+        ITypedReferenceableInstance hrDept2 = deptType.convert(hrDept, Multiplicity.REQUIRED);
+
+
+        Assert.assertEquals(hrDept2.toString(), "{\n" +
+                "\tid : (type: Department, id: <unassigned>)\n" +
+                "\tname : \thr\n" +
+                "\temployees : \t[{\n" +
+                "\tid : (type: Person, id: <unassigned>)\n" +
+                "\tname : \tJohn\n" +
+                "\tdepartment : (type: Department, id: <unassigned>)\n" +
+                "\tmanager : (type: Manager, id: <unassigned>)\n" +
+                "}, {\n" +
+                "\tid : (type: Manager, id: <unassigned>)\n" +
+                "\tsubordinates : \t[{\n" +
+                "\tid : (type: Person, id: <unassigned>)\n" +
+                "\tname : \tJohn\n" +
+                "\tdepartment : (type: Department, id: <unassigned>)\n" +
+                "\tmanager : (type: Manager, id: <unassigned>)\n" +
+                "}]\n" +
+                "\tname : \tJane\n" +
+                "\tdepartment : (type: Department, id: <unassigned>)\n" +
+                "\tmanager : <null>\n" +
+                "\n" +
+                "\tSecurityClearance : \t{\n" +
+                "\t\tlevel : \t\t1\n" +
+                "\t}}]\n" +
+                "}");
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/EnumTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/EnumTest.java b/repository/src/test/java/org/apache/atlas/repository/memory/EnumTest.java
new file mode 100755
index 0000000..731bca6
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/EnumTest.java
@@ -0,0 +1,315 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.typesystem.IReferenceableInstance;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.ITypedStruct;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.json.Serialization$;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumType;
+import org.apache.atlas.typesystem.types.EnumValue;
+import org.apache.atlas.typesystem.types.HierarchicalType;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.Date;
+import java.util.Map;
+
+public class EnumTest extends BaseTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+    }
+
+    void defineEnums(TypeSystem ts) throws MetadataException {
+        ts.defineEnumType("HiveObjectType",
+                new EnumValue("GLOBAL", 1),
+                new EnumValue("DATABASE", 2),
+                new EnumValue("TABLE", 3),
+                new EnumValue("PARTITION", 4),
+                new EnumValue("COLUMN", 5));
+
+        ts.defineEnumType("PrincipalType",
+                new EnumValue("USER", 1),
+                new EnumValue("ROLE", 2),
+                new EnumValue("GROUP", 3));
+
+        ts.defineEnumType("TxnState",
+                new EnumValue("COMMITTED", 1),
+                new EnumValue("ABORTED", 2),
+                new EnumValue("OPEN", 3));
+
+        ts.defineEnumType("LockLevel",
+                new EnumValue("DB", 1),
+                new EnumValue("TABLE", 2),
+                new EnumValue("PARTITION", 3));
+
+    }
+
+    protected void fillStruct(Struct s) throws MetadataException {
+        s.set("a", 1);
+        s.set("b", true);
+        s.set("c", (byte) 1);
+        s.set("d", (short) 2);
+        s.set("e", 1);
+        s.set("f", 1);
+        s.set("g", 1L);
+        s.set("h", 1.0f);
+        s.set("i", 1.0);
+        s.set("j", BigInteger.valueOf(1L));
+        s.set("k", new BigDecimal(1));
+        s.set("l", new Date(1418265358440L));
+        s.set("m", Lists.asList(1, new Integer[]{1}));
+        s.set("n",
+                Lists.asList(BigDecimal.valueOf(1.1), new BigDecimal[]{BigDecimal.valueOf(1.1)}));
+        Map<String, Double> hm = Maps.newHashMap();
+        hm.put("a", 1.0);
+        hm.put("b", 2.0);
+        s.set("o", hm);
+        s.set("enum1", "GLOBAL");
+        s.set("enum2", 1);
+        s.set("enum3", "COMMITTED");
+        s.set("enum4", 3);
+    }
+
+    protected Struct createStructWithEnum(String typeName) throws MetadataException {
+        Struct s = new Struct(typeName);
+        fillStruct(s);
+        return s;
+    }
+
+    protected Referenceable createInstanceWithEnum(String typeName) throws MetadataException {
+        Referenceable r = new Referenceable(typeName);
+        fillStruct(r);
+        return r;
+    }
+
+    protected ClassType defineClassTypeWithEnum(TypeSystem ts) throws MetadataException {
+        return ts.defineClassType(TypesUtil.createClassTypeDef("t4",
+                ImmutableList.<String>of(),
+                TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
+                TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
+                TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
+                TypesUtil.createOptionalAttrDef("enum1",
+                        ts.getDataType(EnumType.class, "HiveObjectType")),
+                TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
+                TypesUtil.createOptionalAttrDef("enum2",
+                        ts.getDataType(EnumType.class, "PrincipalType")),
+                TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
+                TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
+                TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
+                TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
+                TypesUtil
+                        .createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
+                TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
+                TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
+                TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
+                TypesUtil.createOptionalAttrDef("o",
+                        ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
+                TypesUtil.createOptionalAttrDef("enum4",
+                        ts.getDataType(EnumType.class, "LockLevel"))));
+    }
+
+    @Test
+    public void testStruct() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        defineEnums(ts);
+        StructType structType = ts.defineStructType("t3",
+                true,
+                TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
+                TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
+                TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
+                TypesUtil.createOptionalAttrDef("enum1",
+                        ts.getDataType(EnumType.class, "HiveObjectType")),
+                TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
+                TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
+                TypesUtil.createOptionalAttrDef("enum2",
+                        ts.getDataType(EnumType.class, "PrincipalType")),
+                TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
+                TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
+                TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
+                TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
+                TypesUtil
+                        .createOptionalAttrDef("enum3", ts.getDataType(EnumType.class, "TxnState")),
+
+                TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
+                TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
+                TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
+                TypesUtil.createOptionalAttrDef("o",
+                        ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)),
+                TypesUtil.createOptionalAttrDef("enum4",
+                        ts.getDataType(EnumType.class, "LockLevel")));
+
+        Struct s = createStructWithEnum("t3");
+        ITypedStruct typedS = structType.convert(s, Multiplicity.REQUIRED);
+        Assert.assertEquals(typedS.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\tenum1 : \tGLOBAL\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\tenum2 : \tUSER\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tenum3 : \tCOMMITTED\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.1, 1.1]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "\tenum4 : \tPARTITION\n" +
+                "}");
+    }
+
+    @Test
+    public void testClass() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        defineEnums(ts);
+        ClassType clsType = defineClassTypeWithEnum(ts);
+
+        IReferenceableInstance r = createInstanceWithEnum("t4");
+        ITypedReferenceableInstance typedR = clsType.convert(r, Multiplicity.REQUIRED);
+        Assert.assertEquals(typedR.toString(), "{\n" +
+                "\tid : (type: t4, id: <unassigned>)\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\tenum1 : \tGLOBAL\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\tenum2 : \tUSER\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tenum3 : \tCOMMITTED\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.1, 1.1]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "\tenum4 : \tPARTITION\n" +
+                "}");
+    }
+
+    @Test
+    public void testStorage() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+        defineEnums(ts);
+        ClassType clsType = defineClassTypeWithEnum(ts);
+
+        getRepository().defineTypes(ImmutableList.of((HierarchicalType) clsType));
+
+        IReferenceableInstance r = createInstanceWithEnum("t4");
+        IReferenceableInstance r1 = getRepository().create(r);
+
+        ITypedReferenceableInstance r2 = getRepository().get(r1.getId());
+        Assert.assertEquals(r2.toString(), "{\n" +
+                "\tid : (type: t4, id: 1)\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t0\n" +
+                "\tenum1 : \tGLOBAL\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\tenum2 : \tUSER\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tenum3 : \tCOMMITTED\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.1, 1.1]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "\tenum4 : \tPARTITION\n" +
+                "}");
+    }
+
+    @Test
+    public void testJson() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+        defineEnums(ts);
+        ClassType clsType = defineClassTypeWithEnum(ts);
+
+        getRepository().defineTypes(ImmutableList.of((HierarchicalType) clsType));
+
+        IReferenceableInstance r = createInstanceWithEnum("t4");
+        IReferenceableInstance r1 = getRepository().create(r);
+
+        ITypedReferenceableInstance r2 = getRepository().get(r1.getId());
+        String jsonStr = Serialization$.MODULE$.toJson(r2);
+
+        IReferenceableInstance r3 = Serialization$.MODULE$.fromJson(jsonStr);
+        Assert.assertEquals(r3.toString(), "{\n" +
+                "\tid : (type: t4, id: 1)\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t0\n" +
+                "\tenum1 : \tGLOBAL\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\tenum2 : \tUSER\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tenum3 : \tCOMMITTED\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
+                ".100000000000000088817841970012523233890533447265625]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "\tenum4 : \tPARTITION\n" +
+                "}");
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/InstanceE2ETest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/InstanceE2ETest.java b/repository/src/test/java/org/apache/atlas/repository/memory/InstanceE2ETest.java
new file mode 100755
index 0000000..29d3f65
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/InstanceE2ETest.java
@@ -0,0 +1,167 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.json.InstanceSerialization$;
+import org.apache.atlas.typesystem.json.Serialization$;
+import org.apache.atlas.typesystem.json.TypesSerialization$;
+import org.apache.atlas.typesystem.types.AttributeDefinition;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class InstanceE2ETest extends BaseTest {
+
+    protected List<HierarchicalTypeDefinition> createHiveTypes(TypeSystem typeSystem)
+    throws MetadataException {
+        ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>();
+
+        HierarchicalTypeDefinition<ClassType> databaseTypeDefinition =
+                TypesUtil.createClassTypeDef("hive_database",
+                        ImmutableList.<String>of(),
+                        TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                        TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE));
+        typeDefinitions.add(databaseTypeDefinition);
+
+        HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil.createClassTypeDef(
+                "hive_table",
+                ImmutableList.<String>of(),
+                TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE),
+                TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE),
+                TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE),
+                new AttributeDefinition("hive_database",
+                        "hive_database", Multiplicity.REQUIRED, false, "hive_database"));
+        typeDefinitions.add(tableTypeDefinition);
+
+        HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil.createTraitTypeDef(
+                "hive_fetl",
+                ImmutableList.<String>of(),
+                TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE));
+        typeDefinitions.add(fetlTypeDefinition);
+
+        typeSystem.defineTypes(
+                ImmutableList.<StructTypeDefinition>of(),
+                ImmutableList.of(fetlTypeDefinition),
+                ImmutableList.of(databaseTypeDefinition, tableTypeDefinition));
+
+        return typeDefinitions;
+    }
+
+    protected Referenceable createHiveTableReferenceable()
+            throws MetadataException {
+        Referenceable databaseInstance = new Referenceable("hive_database");
+        databaseInstance.set("name", "hive_database");
+        databaseInstance.set("description", "foo database");
+
+        Referenceable tableInstance = new Referenceable("hive_table", "hive_fetl");
+        tableInstance.set("name", "t1");
+        tableInstance.set("description", "bar table");
+        tableInstance.set("type", "managed");
+        tableInstance.set("hive_database", databaseInstance);
+
+        Struct traitInstance = (Struct) tableInstance.getTrait("hive_fetl");
+        traitInstance.set("level", 1);
+
+        tableInstance.set("hive_fetl", traitInstance);
+
+        return tableInstance;
+    }
+
+    protected ITypedReferenceableInstance createHiveTableInstance(TypeSystem typeSystem)
+    throws MetadataException {
+        ClassType tableType = typeSystem.getDataType(ClassType.class, "hive_table");
+        return tableType.convert(createHiveTableReferenceable(), Multiplicity.REQUIRED);
+    }
+
+    @Test
+    public void testType() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+
+        createHiveTypes(ts);
+
+        String jsonStr = TypesSerialization$.MODULE$
+                .toJson(ts, ImmutableList.of("hive_database", "hive_table"));
+        System.out.println(jsonStr);
+
+        TypesDef typesDef1 = TypesSerialization$.MODULE$.fromJson(jsonStr);
+        System.out.println(typesDef1);
+
+        ts.reset();
+        ts.defineTypes(typesDef1);
+        jsonStr = TypesSerialization$.MODULE$
+                .toJson(ts, ImmutableList.of("hive_database", "hive_table"));
+        System.out.println(jsonStr);
+
+    }
+
+    @Test
+    public void testInstance() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+
+        createHiveTypes(ts);
+
+        ITypedReferenceableInstance i = createHiveTableInstance(getTypeSystem());
+
+        String jsonStr = Serialization$.MODULE$.toJson(i);
+        System.out.println(jsonStr);
+
+        i = Serialization$.MODULE$.fromJson(jsonStr);
+        System.out.println(i);
+    }
+
+    @Test
+    public void testInstanceSerialization() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+
+        createHiveTypes(ts);
+
+        Referenceable r = createHiveTableReferenceable();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(r, true);
+        Referenceable  r1 = InstanceSerialization$.MODULE$.fromJsonReferenceable(jsonStr, true);
+        ClassType tableType = ts.getDataType(ClassType.class, "hive_table");
+
+        ITypedReferenceableInstance i = tableType.convert(r1, Multiplicity.REQUIRED);
+
+        jsonStr = Serialization$.MODULE$.toJson(i);
+        System.out.println(jsonStr);
+
+        i = Serialization$.MODULE$.fromJson(jsonStr);
+        System.out.println(i);
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/StorageTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/StorageTest.java b/repository/src/test/java/org/apache/atlas/repository/memory/StorageTest.java
new file mode 100755
index 0000000..5b1159c
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/StorageTest.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.repository.RepositoryException;
+import org.apache.atlas.typesystem.ITypedReferenceableInstance;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.TypeSystem;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class StorageTest extends BaseTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+    }
+
+    @Test
+    public void test1() throws MetadataException {
+
+        TypeSystem ts = getTypeSystem();
+
+        defineDeptEmployeeTypes(ts);
+
+        Referenceable hrDept = createDeptEg1(ts);
+        ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
+        ITypedReferenceableInstance hrDept3 = getRepository().get(hrDept2.getId());
+        Assert.assertEquals(hrDept3.toString(), "{\n" +
+                "\tid : (type: Department, id: 1)\n" +
+                "\tname : \thr\n" +
+                "\temployees : \t[{\n" +
+                "\tid : (type: Person, id: 2)\n" +
+                "\tname : \tJohn\n" +
+                "\tdepartment : (type: Department, id: 1)\n" +
+                "\tmanager : (type: Manager, id: 3)\n" +
+                "}, {\n" +
+                "\tid : (type: Manager, id: 3)\n" +
+                "\tsubordinates : \t[(type: Person, id: 2)]\n" +
+                "\tname : \tJane\n" +
+                "\tdepartment : (type: Department, id: 1)\n" +
+                "\tmanager : <null>\n" +
+                "\n" +
+                "\tSecurityClearance : \t{\n" +
+                "\t\tlevel : \t\t1\n" +
+                "\t}}]\n" +
+                "}");
+    }
+
+    @Test
+    public void testGetPerson() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        defineDeptEmployeeTypes(ts);
+
+        Referenceable hrDept = createDeptEg1(ts);
+        ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
+
+        Id e1Id = new Id(2, 0, "Person");
+        ITypedReferenceableInstance e1 = getRepository().get(e1Id);
+        Assert.assertEquals(e1.toString(), "{\n" +
+                "\tid : (type: Person, id: 2)\n" +
+                "\tname : \tJohn\n" +
+                "\tdepartment : (type: Department, id: 1)\n" +
+                "\tmanager : (type: Manager, id: 3)\n" +
+                "}");
+    }
+
+    @Test
+    public void testInvalidTypeName() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        defineDeptEmployeeTypes(ts);
+
+        Referenceable hrDept = createDeptEg1(ts);
+        ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
+
+        Id e1Id = new Id(3, 0, "Person");
+        try {
+            ITypedReferenceableInstance e1 = getRepository().get(e1Id);
+        } catch (RepositoryException re) {
+            RepositoryException me = (RepositoryException) re.getCause();
+            Assert.assertEquals(me.getMessage(), "Invalid Id (unknown) : (type: Person, id: 3)");
+
+        }
+    }
+
+    @Test
+    public void testGetManager() throws MetadataException {
+        TypeSystem ts = getTypeSystem();
+        defineDeptEmployeeTypes(ts);
+
+        Referenceable hrDept = createDeptEg1(ts);
+        ITypedReferenceableInstance hrDept2 = getRepository().create(hrDept);
+
+        Id m1Id = new Id(3, 0, "Manager");
+        ITypedReferenceableInstance m1 = getRepository().get(m1Id);
+        Assert.assertEquals(m1.toString(), "{\n" +
+                "\tid : (type: Manager, id: 3)\n" +
+                "\tsubordinates : \t[(type: Person, id: 2)]\n" +
+                "\tname : \tJane\n" +
+                "\tdepartment : (type: Department, id: 1)\n" +
+                "\tmanager : <null>\n" +
+                "\n" +
+                "\tSecurityClearance : \t{\n" +
+                "\t\tlevel : \t\t1\n" +
+                "\t}}");
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/StructTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/StructTest.java b/repository/src/test/java/org/apache/atlas/repository/memory/StructTest.java
new file mode 100755
index 0000000..d42a899
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/StructTest.java
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.typesystem.ITypedStruct;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.json.InstanceSerialization$;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.StructType;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class StructTest extends BaseTest {
+
+    StructType structType;
+    StructType recursiveStructType;
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        structType = (StructType) getTypeSystem().getDataType(StructType.class, STRUCT_TYPE_1);
+        recursiveStructType = (StructType) getTypeSystem()
+                .getDataType(StructType.class, STRUCT_TYPE_2);
+    }
+
+    @Test
+    public void test1() throws MetadataException {
+        Struct s = createStruct();
+        ITypedStruct ts = structType.convert(s, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.1, 1.1]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "}");
+    }
+
+    @Test
+    public void testRecursive() throws MetadataException {
+        Struct s1 = new Struct(recursiveStructType.getName());
+        s1.set("a", 1);
+        Struct s2 = new Struct(recursiveStructType.getName());
+        s2.set("a", 1);
+        s2.set("s", s1);
+        ITypedStruct ts = recursiveStructType.convert(s2, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\ts : \t{\n" +
+                "\t\ta : \t\t1\n" +
+                "\t\ts : <null>\n" +
+                "\n" +
+                "\t}\n" +
+                "}");
+    }
+
+    @Test
+    public void testSerialization() throws MetadataException {
+        Struct s = createStruct();
+        String jsonStr = InstanceSerialization$.MODULE$.toJson(s, true);
+        Struct s1 = InstanceSerialization$.MODULE$.fromJsonStruct(jsonStr, true);
+        ITypedStruct ts = structType.convert(s1, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\ta : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\td : \t2\n" +
+                "\te : \t1\n" +
+                "\tf : \t1\n" +
+                "\tg : \t1\n" +
+                "\th : \t1.0\n" +
+                "\ti : \t1.0\n" +
+                "\tj : \t1\n" +
+                "\tk : \t1\n" +
+                "\tl : \t" + TEST_DATE + "\n" +
+                "\tm : \t[1, 1]\n" +
+                "\tn : \t[1.100000000000000088817841970012523233890533447265625, 1" +
+                ".100000000000000088817841970012523233890533447265625]\n" +
+                "\to : \t{b=2.0, a=1.0}\n" +
+                "}");
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/atlas/repository/memory/TraitTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/atlas/repository/memory/TraitTest.java b/repository/src/test/java/org/apache/atlas/repository/memory/TraitTest.java
new file mode 100755
index 0000000..583d43f
--- /dev/null
+++ b/repository/src/test/java/org/apache/atlas/repository/memory/TraitTest.java
@@ -0,0 +1,207 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.repository.memory;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.repository.BaseTest;
+import org.apache.atlas.typesystem.IStruct;
+import org.apache.atlas.typesystem.ITypedStruct;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.Multiplicity;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef;
+import static org.apache.atlas.typesystem.types.utils.TypesUtil.createRequiredAttrDef;
+import static org.apache.atlas.typesystem.types.utils.TypesUtil.createTraitTypeDef;
+
+public class TraitTest extends BaseTest {
+
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+    }
+
+    /*
+     * Type Hierarchy is:
+     *   A(a,b,c,d)
+     *   B(b) extends A
+     *   C(c) extends A
+     *   D(d) extends B,C
+     *
+     * - There are a total of 11 fields in an instance of D
+     * - an attribute that is hidden by a SubType can referenced by prefixing it with the
+     * complete Path.
+     *   For e.g. the 'b' attribute in A (that is a superType for B) is hidden the 'b' attribute
+     *   in B.
+     *   So it is availabel by the name 'A.B.D.b'
+     *
+     * - Another way to set attributes is to cast. Casting a 'D' instance of 'B' makes the 'A.B.D
+     * .b' attribute
+     *   available as 'A.B.b'. Casting one more time to an 'A' makes the 'A.B.b' attribute
+     *   available as 'b'.
+     */
+    @Test
+    public void test1() throws MetadataException {
+        HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
+                createRequiredAttrDef("a", DataTypes.INT_TYPE),
+                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
+                createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
+                createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
+        HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
+                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
+        HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
+                createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
+        HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
+                createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
+
+        defineTraits(A, B, C, D);
+
+        TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
+
+        Struct s1 = new Struct("D");
+        s1.set("d", 1);
+        s1.set("c", 1);
+        s1.set("b", true);
+        s1.set("a", 1);
+        s1.set("A.B.D.b", true);
+        s1.set("A.B.D.c", 2);
+        s1.set("A.B.D.d", 2);
+
+        s1.set("A.C.D.a", 3);
+        s1.set("A.C.D.b", false);
+        s1.set("A.C.D.c", 3);
+        s1.set("A.C.D.d", 3);
+
+
+        ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\td : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\ta : \t1\n" +
+                "\tA.B.D.b : \ttrue\n" +
+                "\tA.B.D.c : \t2\n" +
+                "\tA.B.D.d : \t2\n" +
+                "\tA.C.D.a : \t3\n" +
+                "\tA.C.D.b : \tfalse\n" +
+                "\tA.C.D.c : \t3\n" +
+                "\tA.C.D.d : \t3\n" +
+                "}");
+
+        /*
+         * cast to B and set the 'b' attribute on A.
+         */
+        TraitType BType = (TraitType) getTypeSystem().getDataType(TraitType.class, "B");
+        IStruct s2 = DType.castAs(ts, "B");
+        s2.set("A.B.b", false);
+
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\td : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\ta : \t1\n" +
+                "\tA.B.D.b : \tfalse\n" +
+                "\tA.B.D.c : \t2\n" +
+                "\tA.B.D.d : \t2\n" +
+                "\tA.C.D.a : \t3\n" +
+                "\tA.C.D.b : \tfalse\n" +
+                "\tA.C.D.c : \t3\n" +
+                "\tA.C.D.d : \t3\n" +
+                "}");
+
+        /*
+         * cast again to A and set the 'b' attribute on A.
+         */
+        TraitType AType = (TraitType) getTypeSystem().getDataType(TraitType.class, "A");
+        IStruct s3 = BType.castAs(s2, "A");
+        s3.set("b", true);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\td : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\ta : \t1\n" +
+                "\tA.B.D.b : \ttrue\n" +
+                "\tA.B.D.c : \t2\n" +
+                "\tA.B.D.d : \t2\n" +
+                "\tA.C.D.a : \t3\n" +
+                "\tA.C.D.b : \tfalse\n" +
+                "\tA.C.D.c : \t3\n" +
+                "\tA.C.D.d : \t3\n" +
+                "}");
+    }
+
+    @Test
+    public void testRandomOrder() throws MetadataException {
+        HierarchicalTypeDefinition A = createTraitTypeDef("A", null,
+                createRequiredAttrDef("a", DataTypes.INT_TYPE),
+                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
+                createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
+                createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
+        HierarchicalTypeDefinition B = createTraitTypeDef("B", ImmutableList.<String>of("A"),
+                createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE));
+        HierarchicalTypeDefinition C = createTraitTypeDef("C", ImmutableList.<String>of("A"),
+                createOptionalAttrDef("c", DataTypes.BYTE_TYPE));
+        HierarchicalTypeDefinition D = createTraitTypeDef("D", ImmutableList.<String>of("B", "C"),
+                createOptionalAttrDef("d", DataTypes.SHORT_TYPE));
+
+        defineTraits(B, D, A, C);
+
+        TraitType DType = (TraitType) getTypeSystem().getDataType(TraitType.class, "D");
+
+        Struct s1 = new Struct("D");
+        s1.set("d", 1);
+        s1.set("c", 1);
+        s1.set("b", true);
+        s1.set("a", 1);
+        s1.set("A.B.D.b", true);
+        s1.set("A.B.D.c", 2);
+        s1.set("A.B.D.d", 2);
+
+        s1.set("A.C.D.a", 3);
+        s1.set("A.C.D.b", false);
+        s1.set("A.C.D.c", 3);
+        s1.set("A.C.D.d", 3);
+
+
+        ITypedStruct ts = DType.convert(s1, Multiplicity.REQUIRED);
+        Assert.assertEquals(ts.toString(), "{\n" +
+                "\td : \t1\n" +
+                "\tb : \ttrue\n" +
+                "\tc : \t1\n" +
+                "\ta : \t1\n" +
+                "\tA.B.D.b : \ttrue\n" +
+                "\tA.B.D.c : \t2\n" +
+                "\tA.B.D.d : \t2\n" +
+                "\tA.C.D.a : \t3\n" +
+                "\tA.C.D.b : \tfalse\n" +
+                "\tA.C.D.c : \t3\n" +
+                "\tA.C.D.d : \t3\n" +
+                "}");
+
+    }
+
+}
+


Mime
View raw message