atlas-commits mailing list archives

From: venkat...@apache.org
Subject: [23/58] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date: Tue, 16 Jun 2015 23:04:57 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/java/org/apache/hadoop/metadata/repository/typestore/GraphBackedTypeStoreTest.java
----------------------------------------------------------------------
diff --git a/repository/src/test/java/org/apache/hadoop/metadata/repository/typestore/GraphBackedTypeStoreTest.java b/repository/src/test/java/org/apache/hadoop/metadata/repository/typestore/GraphBackedTypeStoreTest.java
deleted file mode 100755
index 1d2684d..0000000
--- a/repository/src/test/java/org/apache/hadoop/metadata/repository/typestore/GraphBackedTypeStoreTest.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.repository.typestore;
-
-import com.thinkaurelius.titan.core.TitanGraph;
-import com.tinkerpop.blueprints.Direction;
-import com.tinkerpop.blueprints.Edge;
-import com.tinkerpop.blueprints.Vertex;
-import junit.framework.Assert;
-import org.apache.hadoop.metadata.GraphTransaction;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.RepositoryMetadataModule;
-import org.apache.hadoop.metadata.TestUtils;
-import org.apache.hadoop.metadata.repository.graph.GraphHelper;
-import org.apache.hadoop.metadata.repository.graph.GraphProvider;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.EnumValue;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import javax.inject.Inject;
-import java.util.List;
-
-@Guice(modules = RepositoryMetadataModule.class)
-public class GraphBackedTypeStoreTest {
-    @Inject
-    private GraphProvider<TitanGraph> graphProvider;
-
-    @Inject
-    private ITypeStore typeStore;
-
-    private TypeSystem ts;
-
-    @BeforeClass
-    public void setUp() throws Exception {
-        ts = TypeSystem.getInstance();
-        ts.reset();
-        TestUtils.defineDeptEmployeeTypes(ts);
-    }
-
-    @Test
-    @GraphTransaction
-    public void testStore() throws MetadataException {
-        typeStore.store(ts);
-        dumpGraph();
-    }
-
-    private void dumpGraph() {
-        TitanGraph graph = graphProvider.get();
-        for (Vertex v : graph.getVertices()) {
-            System.out.println("****v = " + GraphHelper.vertexString(v));
-            for (Edge e : v.getEdges(Direction.OUT)) {
-                System.out.println("****e = " + GraphHelper.edgeString(e));
-            }
-        }
-    }
-
-    @Test (dependsOnMethods = "testStore")
-    @GraphTransaction
-    public void testRestore() throws Exception {
-        TypesDef types = typeStore.restore();
-
-        //validate enum
-        List<EnumTypeDefinition> enumTypes = types.enumTypesAsJavaList();
-        Assert.assertEquals(1, enumTypes.size());
-        EnumTypeDefinition orgLevel = enumTypes.get(0);
-        Assert.assertEquals(orgLevel.name, "OrgLevel");
-        Assert.assertEquals(orgLevel.enumValues.length, 2);
-        EnumValue enumValue = orgLevel.enumValues[0];
-        Assert.assertEquals(enumValue.value, "L1");
-        Assert.assertEquals(enumValue.ordinal, 1);
-
-        //validate class
-        List<StructTypeDefinition> structTypes = types.structTypesAsJavaList();
-        Assert.assertEquals(1, structTypes.size());
-
-        boolean clsTypeFound = false;
-        List<HierarchicalTypeDefinition<ClassType>> classTypes = types.classTypesAsJavaList();
-        for (HierarchicalTypeDefinition<ClassType> classType : classTypes) {
-            if (classType.typeName.equals("Manager")) {
-                ClassType expectedType = ts.getDataType(ClassType.class, classType.typeName);
-                Assert.assertEquals(expectedType.immediateAttrs.size(), classType.attributeDefinitions.length);
-                Assert.assertEquals(expectedType.superTypes.size(), classType.superTypes.size());
-                clsTypeFound = true;
-            }
-        }
-        Assert.assertTrue("Manager type not restored", clsTypeFound);
-
-        //validate trait
-        List<HierarchicalTypeDefinition<TraitType>> traitTypes = types.traitTypesAsJavaList();
-        Assert.assertEquals(1, traitTypes.size());
-        HierarchicalTypeDefinition<TraitType> trait = traitTypes.get(0);
-        Assert.assertEquals("SecurityClearance", trait.typeName);
-        Assert.assertEquals(1, trait.attributeDefinitions.length);
-        AttributeDefinition attribute = trait.attributeDefinitions[0];
-        Assert.assertEquals("level", attribute.name);
-        Assert.assertEquals(DataTypes.INT_TYPE.getName(), attribute.dataTypeName);
-
-        //validate the new types
-        ts.reset();
-        ts.defineTypes(types);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala b/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
new file mode 100755
index 0000000..3c624ae
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/ExpressionTest.scala
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import org.apache.atlas.query.Expressions._
+import org.apache.atlas.repository.BaseTest
+import org.junit.{Before, Test}
+
+class ExpressionTest extends BaseTest {
+
+    @Before
+    override def setup {
+        super.setup
+
+        QueryTestsUtils.setupTypes
+
+    }
+
+    @Test def testClass: Unit = {
+        val e = QueryProcessor.validate(_class("DB"))
+        println(e)
+    }
+
+    @Test def testFilter: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))))
+        println(e)
+    }
+
+    @Test def testSelect: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))).
+            select(id("name"), id("owner")))
+        println(e)
+    }
+
+    @Test def testNegTypeTest: Unit = {
+        try {
+            val e = QueryProcessor.validate(_class("DB").where(id("name")))
+            println(e)
+        } catch {
+            case e: ExpressionException if e.getMessage.endsWith("expression: DB where name") => ()
+        }
+    }
+
+    @Test def testIsTrait: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(isTrait("JdbcAccess")))
+        println(e)
+    }
+
+    @Test def testIsTraitNegative: Unit = {
+        try {
+            val e = QueryProcessor.validate(_class("DB").where(isTrait("Jdb")))
+            println(e)
+        } catch {
+            case e: ExpressionException if e.getMessage.endsWith("not a TraitType, expression:  is Jdb") => ()
+        }
+    }
+
+    @Test def testhasField: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(hasField("name")))
+        println(e)
+    }
+
+    @Test def testHasFieldNegative: Unit = {
+        try {
+            val e = QueryProcessor.validate(_class("DB").where(hasField("nam")))
+            println(e)
+        } catch {
+            case e: ExpressionException if e.getMessage.endsWith("not a TraitType, expression:  is Jdb") => ()
+        }
+    }
+
+    @Test def testFieldReference: Unit = {
+        val e = QueryProcessor.validate(_class("DB").field("Table"))
+        println(e)
+    }
+
+    @Test def testNegFieldReference: Unit = {
+        try {
+            val e = QueryProcessor.validate(_class("DB").where(_class("LoadProcess").hasField("name")))
+            println(e)
+        } catch {
+            case e: ExpressionException
+                if e.getMessage.endsWith("srcType of field doesn't match input type, expression: LoadProcess has name") => ()
+        }
+    }
+
+    @Test def testFieldReferenceRedundant: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(_class("DB").hasField("name")))
+        println(e)
+    }
+
+    @Test def testBackReference: Unit = {
+        val e = QueryProcessor.validate(
+            _class("DB").as("db1").field("Table").where(id("db1").field("name").`=`(string("Reporting"))))
+        println(e)
+    }
+
+    @Test def testArith: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting"))).
+            select(id("name"), id("createTime") + int(1)))
+        println(e)
+    }
+
+    @Test def testComparisonLogical: Unit = {
+        val e = QueryProcessor.validate(_class("DB").where(id("name").`=`(string("Reporting")).
+            and(id("createTime") + int(1) > int(0))))
+        println(e)
+    }
+
+    @Test def testJoinAndSelect1: Unit = {
+        val e = QueryProcessor.validate(
+            _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
+                .and(id("db1").field("name").`=`(string("Reporting")))).select(id("db1").field("name").as("dbName"),
+                    id("tab").field("name").as("tabName"))
+        )
+        println(e)
+    }
+
+    @Test def testJoinAndSelect2: Unit = {
+        val e = QueryProcessor.validate(
+            _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
+                .or(id("db1").field("name").`=`(string("Reporting"))))
+                .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName"))
+        )
+        println(e)
+    }
+
+    @Test def testJoinAndSelect3: Unit = {
+        val e = QueryProcessor.validate(
+            _class("DB").as("db1").field("Table").as("tab").where((id("db1").field("createTime") + int(1) > int(0))
+                .and(id("db1").field("name").`=`(string("Reporting")))
+                .or(id("db1").hasField("owner")))
+                .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName"))
+        )
+        println(e)
+    }
+
+    @Test def testJoinAndSelect4: Unit = {
+        val e = QueryProcessor.validate(
+            _class("DB") as "db1" join "Table" as "tab" where (
+                id("db1").field("createTime") + int(1) > int(0) and
+                    (id("db1") `.` "name" `=` string("Reporting")) or
+                    (id("db1") hasField "owner")
+                ) select(
+                id("db1") `.` "name" as "dbName", id("tab") `.` "name" as "tabName"
+                )
+        )
+        println(e)
+    }
+
+    @Test def testLineageAll: Unit = {
+        val e = QueryProcessor.validate(_class("Table").loop(id("LoadProcess").field("outputTable")))
+        println(e)
+    }
+}
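
The tests above exercise the query DSL in two styles: plain method chaining (testJoinAndSelect1 through testJoinAndSelect3) and the infix form with backticked operators (testJoinAndSelect4). A minimal side-by-side sketch, assuming the same Expressions import and the types registered by QueryTestsUtils.setupTypes; the query itself is illustrative, not one of the tests:

    import org.apache.atlas.query._
    import org.apache.atlas.query.Expressions._

    // Method-chaining form, as used in most of the tests above
    val chained = _class("DB").as("db1").field("Table").as("tab")
        .where(id("db1").field("name").`=`(string("Reporting")))
        .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName"))

    // Infix form with backticked operators, as used in testJoinAndSelect4
    val infix = _class("DB") as "db1" join "Table" as "tab" where (
        id("db1") `.` "name" `=` string("Reporting")
        ) select(id("db1") `.` "name" as "dbName", id("tab") `.` "name" as "tabName")

    // Both forms should pass validation and print the resolved expression
    println(QueryProcessor.validate(chained))
    println(QueryProcessor.validate(infix))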

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala b/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
new file mode 100755
index 0000000..aa41313
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/GremlinTest.scala
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import com.thinkaurelius.titan.core.TitanGraph
+import org.apache.atlas.query.Expressions._
+import org.apache.atlas.typesystem.types.TypeSystem
+import org.junit.runner.RunWith
+import org.scalatest.Matchers._
+import org.scalatest._
+import org.scalatest.junit.JUnitRunner
+
+@RunWith(classOf[JUnitRunner])
+class GremlinTest extends FunSuite with BeforeAndAfterAll with BaseGremlinTest {
+
+    var g: TitanGraph = null
+
+    override def beforeAll() {
+        TypeSystem.getInstance().reset()
+        QueryTestsUtils.setupTypes
+        g = QueryTestsUtils.setupTestGraph
+    }
+
+    override def afterAll() {
+        g.shutdown()
+    }
+
+    test("testClass") {
+        val r = QueryProcessor.evaluate(_class("DB"), g)
+        validateJson(r, "{\n  \"query\":\"DB\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"DB\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"owner\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"createTime\",\n        \"dataTypeName\":\"int\",\n    
     \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"John ETL\",\n      \"name\":\"Sales\",\n      \"createTime\":1000\n    },\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"Jane BI\",\n      \"name\":\"Reporting\",\n      \"createTime\":1500\n    }\n  ]\n}")
+    }
+
+    test("testName") {
+        val r = QueryProcessor.evaluate(_class("DB").field("name"), g)
+        validateJson(r, "{\n  \"query\":\"DB.name\",\n  \"dataType\":\"string\",\n  \"rows\":[\n    \"Sales\",\n    \"Reporting\"\n  ]\n}")
+    }
+
+    test("testFilter") {
+        var r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))), g)
+        validateJson(r, "{\n  \"query\":\"DB where (name = \\\"Reporting\\\")\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"DB\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"owner\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"createTime\",\n      
   \"dataTypeName\":\"int\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"Jane BI\",\n      \"name\":\"Reporting\",\n      \"createTime\":1500\n    }\n  ]\n}")
+    }
+
+    test("testFilter2") {
+        var r = QueryProcessor.evaluate(_class("DB").where(id("DB").field("name").`=`(string("Reporting"))), g)
+        validateJson(r, "{\n  \"query\":\"DB where (name = \\\"Reporting\\\")\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"DB\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"owner\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"createTime\",\n      
   \"dataTypeName\":\"int\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"Jane BI\",\n      \"name\":\"Reporting\",\n      \"createTime\":1500\n    }\n  ]\n}")
+    }
+
+
+    test("testSelect") {
+        val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
+            select(id("name"), id("owner")), g)
+        validateJson(r, "{\n  \"query\":\"DB where (name = \\\"Reporting\\\") as _src1 select _src1.name as _col_0, _src1.owner as _col_1\",\n  \"dataType\":{\n    \"typeName\":\"__tempQueryResultStruct1\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"_col_0\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"_col_1\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"__tempQueryResultStruct1\",\n
       \"_col_1\":\"Jane BI\",\n      \"_col_0\":\"Reporting\"\n    }\n  ]\n}")
+    }
+
+    test("testIsTrait") {
+        val r = QueryProcessor.evaluate(_class("Table").where(isTrait("Dimension")), g)
+        validateJson(r, "{\n  \"query\":\"Table where Table is Dimension\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"Table\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"db\",\n        \"dataTypeName\":\"DB\",\n        \"multiplicity\":{\n          \"lower\":1,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"sd\",\n        \"dataTypeName\
 ":\"StorageDesc\",\n        \"multiplicity\":{\n          \"lower\":1,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"3328\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"2304\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"product_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"4864\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n  
       \"id\":\"3840\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"time_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"6656\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"5376\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"customer_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    }\n  ]\n}")
+    }
+
+    test("testhasField") {
+        val r = QueryProcessor.evaluate(_class("DB").where(hasField("name")), g)
+        validateJson(r, "{\n  \"query\":\"DB where DB has name\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"DB\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"owner\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"createTime\",\n        \"dataTypeNam
 e\":\"int\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"John ETL\",\n      \"name\":\"Sales\",\n      \"createTime\":1000\n    },\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"Jane BI\",\n      \"name\":\"Reporting\",\n      \"createTime\":1500\n    }\n  ]\n}")
+    }
+
+    test("testFieldReference") {
+        val r = QueryProcessor.evaluate(_class("DB").field("Table"), g)
+        validateJson(r, "{\n  \"query\":\"DB Table\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"Table\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"db\",\n        \"dataTypeName\":\"DB\",\n        \"multiplicity\":{\n          \"lower\":1,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"sd\",\n        \"dataTypeName\":\"StorageDesc\",\n  
       \"multiplicity\":{\n          \"lower\":1,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"2048\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"512\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"sales_fact\"\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"3328\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"2304\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"2
 56\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"product_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"4864\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"3840\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"time_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"6656\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"5376\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n
       },\n      \"db\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"customer_dim\",\n      \"$traits$\":{\n        \"Dimension\":{\n          \"$typeName$\":\"Dimension\"\n        }\n      }\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"8960\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"7424\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"sales_fact_daily_mv\"\n    },\n    {\n      \"$typeName$\":\"Table\",\n      \"$id$\":{\n        \"id\":\"12800\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      },\n      \"sd\":{\n        \"id\":\"11264\",\n        \"$typeName$\":\"StorageDesc\",\n        \"version\":0\n      },\n      \"db\":{\n        \"id\":\"7168\"
 ,\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"name\":\"sales_fact_monthly_mv\"\n    }\n  ]\n}")
+    }
+
+    test("testBackReference") {
+        val r = QueryProcessor.evaluate(
+            _class("DB").as("db").field("Table").where(id("db").field("name").`=`(string("Reporting"))), g)
+        validateJson(r, null)
+    }
+
+    test("testArith") {
+        val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting"))).
+            select(id("name"), id("createTime") + int(1)), g)
+        validateJson(r, "{\n  \"query\":\"DB where (name = \\\"Reporting\\\") as _src1 select _src1.name as _col_0, (_src1.createTime + 1) as _col_1\",\n  \"dataType\":{\n    \"typeName\":\"__tempQueryResultStruct3\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"_col_0\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"_col_1\",\n        \"dataTypeName\":\"int\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"__tempQueryResultStru
 ct3\",\n      \"_col_1\":1501,\n      \"_col_0\":\"Reporting\"\n    }\n  ]\n}")
+    }
+
+    test("testComparisonLogical") {
+        val r = QueryProcessor.evaluate(_class("DB").where(id("name").`=`(string("Reporting")).
+            and(id("createTime") > int(0))), g)
+        validateJson(r, "{\n  \"query\":\"DB where (name = \\\"Reporting\\\") and (createTime > 0)\",\n  \"dataType\":{\n    \"superTypes\":[\n      \n    ],\n    \"hierarchicalMetaTypeName\":\"org.apache.atlas.typesystem.types.ClassType\",\n    \"typeName\":\"DB\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"name\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"owner\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"
 createTime\",\n        \"dataTypeName\":\"int\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"DB\",\n      \"$id$\":{\n        \"id\":\"7168\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"owner\":\"Jane BI\",\n      \"name\":\"Reporting\",\n      \"createTime\":1500\n    }\n  ]\n}")
+    }
+
+    test("testJoinAndSelect1") {
+        val r = QueryProcessor.evaluate(
+            _class("DB").as("db1").where(id("name").`=`(string("Sales"))).field("Table").as("tab").
+                where((isTrait("Dimension"))).
+                select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g
+        )
+        validateJson(r, "{\n  \"query\":\"DB as db1 where (name = \\\"Sales\\\") Table as tab where DB as db1 where (name = \\\"Sales\\\") Table as tab is Dimension as _src1 select db1.name as dbName, tab.name as tabName\",\n  \"dataType\":{\n    \"typeName\":\"__tempQueryResultStruct5\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"dbName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"tabName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n  
   ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"__tempQueryResultStruct5\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"product_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct5\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"time_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct5\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"customer_dim\"\n    }\n  ]\n}")
+    }
+
+    test("testJoinAndSelect2") {
+        val r = QueryProcessor.evaluate(
+            _class("DB").as("db1").where((id("db1").field("createTime") > int(0))
+                .or(id("name").`=`(string("Reporting")))).field("Table").as("tab")
+                .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g
+        )
+        validateJson(r, "{\n  \"query\":\"DB as db1 where (db1.createTime > 0) or (name = \\\"Reporting\\\") Table as tab select db1.name as dbName, tab.name as tabName\",\n  \"dataType\":{\n    \"typeName\":\"__tempQueryResultStruct6\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"dbName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"tabName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\"
 :\"__tempQueryResultStruct6\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"sales_fact\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct6\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"product_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct6\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"time_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct6\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"customer_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct6\",\n      \"dbName\":\"Reporting\",\n      \"tabName\":\"sales_fact_daily_mv\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct6\",\n      \"dbName\":\"Reporting\",\n      \"tabName\":\"sales_fact_monthly_mv\"\n    }\n  ]\n}")
+    }
+
+    test("testJoinAndSelect3") {
+        val r = QueryProcessor.evaluate(
+            _class("DB").as("db1").where((id("db1").field("createTime") > int(0))
+                .and(id("db1").field("name").`=`(string("Reporting")))
+                .or(id("db1").hasField("owner"))).field("Table").as("tab")
+                .select(id("db1").field("name").as("dbName"), id("tab").field("name").as("tabName")), g
+        )
+        validateJson(r, "{\n  \"query\":\"DB as db1 where (db1.createTime > 0) and (db1.name = \\\"Reporting\\\") or DB as db1 has owner Table as tab select db1.name as dbName, tab.name as tabName\",\n  \"dataType\":{\n    \"typeName\":\"__tempQueryResultStruct7\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"dbName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"tabName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\
 n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"sales_fact\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"product_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"time_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Sales\",\n      \"tabName\":\"customer_dim\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Reporting\",\n      \"tabName\":\"sales_fact_daily_mv\"\n    },\n    {\n      \"$typeName$\":\"__tempQueryResultStruct7\",\n      \"dbName\":\"Reporting\",\n      \"tabName\":\"sales_fact_monthly_mv\"\n    }\n  ]\n}")
+    }
+
+    test("testJoinAndSelect4") {
+      val r = QueryProcessor.evaluate(
+        _class("DB").as("db1").where(id("name").`=`(string("Sales"))).field("Table").as("tab").
+          where((isTrait("Dimension"))).
+          select(id("db1").as("dbO"), id("tab").field("name").as("tabName")), g
+      )
+      validateJson(r, "{\n  \"query\":\"DB as db1 where (name = \\\"Sales\\\") Table as tab where DB as db1 where (name = \\\"Sales\\\") Table as tab is Dimension as _src1 select db1 as dbO, tab.name as tabName\",\n  \"dataType\":{\n    \"typeName\":\"\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"dbO\",\n        \"dataTypeName\":\"DB\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"tabName\",\n        \"dataTypeName\":\"string\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$
 typeName$\":\"\",\n      \"dbO\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"tabName\":\"product_dim\"\n    },\n    {\n      \"$typeName$\":\"\",\n      \"dbO\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"tabName\":\"time_dim\"\n    },\n    {\n      \"$typeName$\":\"\",\n      \"dbO\":{\n        \"id\":\"256\",\n        \"$typeName$\":\"DB\",\n        \"version\":0\n      },\n      \"tabName\":\"customer_dim\"\n    }\n  ]\n}")
+    }
+
+    test("testNegativeInvalidType") {
+      val p = new QueryParser
+      val e = p("from blah").right.get
+      an [ExpressionException] should be thrownBy QueryProcessor.evaluate(e, g)
+    }
+
+    test("Bug37860") {
+        val p = new QueryParser
+        val e = p("Table as t where name = 'sales_fact' db where name = 'Sales' and owner = 'John ETL' select t").right.get
+        val r = QueryProcessor.evaluate(e, g)
+        validateJson(r)
+    }
+}
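
The last two tests above ("testNegativeInvalidType" and "Bug37860") drive the same machinery through the text parser instead of building expressions by hand: QueryParser turns a DSL string into an Either, and the Right side is an Expression that QueryProcessor.evaluate runs against the Titan graph. A minimal sketch of that path, reusing the suite's setup helpers; the query text and the printed output are illustrative only:

    import com.thinkaurelius.titan.core.TitanGraph
    import org.apache.atlas.query._
    import org.apache.atlas.typesystem.types.TypeSystem

    // Mirror the beforeAll setup used by the test suites above
    TypeSystem.getInstance().reset()
    QueryTestsUtils.setupTypes
    val g: TitanGraph = QueryTestsUtils.setupTestGraph

    val p = new QueryParser
    p("DB where name = 'Reporting'") match {
      case Right(expr) => println(QueryProcessor.evaluate(expr, g)) // translate to Gremlin and run against the test graph
      case Left(err)   => println("parse failed: " + err)
    }

    g.shutdown()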

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala b/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
new file mode 100755
index 0000000..ea0b9bb
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/GremlinTest2.scala
@@ -0,0 +1,135 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import com.thinkaurelius.titan.core.TitanGraph
+import org.apache.atlas.query.Expressions._
+import org.apache.atlas.typesystem.types.TypeSystem
+import org.junit.runner.RunWith
+import org.scalatest._
+import org.scalatest.junit.JUnitRunner
+
+@RunWith(classOf[JUnitRunner])
+class GremlinTest2 extends FunSuite with BeforeAndAfterAll with BaseGremlinTest {
+
+  var g: TitanGraph = null
+
+  override def beforeAll() {
+    TypeSystem.getInstance().reset()
+    QueryTestsUtils.setupTypes
+    g = QueryTestsUtils.setupTestGraph
+  }
+
+  override def afterAll() {
+    g.shutdown()
+  }
+
+  test("testTraitSelect") {
+    val r = QueryProcessor.evaluate(_class("Table").as("t").join("Dimension").as("dim").select(id("t"), id("dim")), g)
+    validateJson(r, "{\n  \"query\":\"Table as t.Dimension as dim select t as _col_0, dim as _col_1\",\n  \"dataType\":{\n    \"typeName\":\"\",\n    \"attributeDefinitions\":[\n      {\n        \"name\":\"_col_0\",\n        \"dataTypeName\":\"Table\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      },\n      {\n        \"name\":\"_col_1\",\n        \"dataTypeName\":\"Dimension\",\n        \"multiplicity\":{\n          \"lower\":0,\n          \"upper\":1,\n          \"isUnique\":false\n        },\n        \"isComposite\":false,\n        \"isUnique\":false,\n        \"isIndexable\":true,\n        \"reverseAttributeName\":null\n      }\n    ]\n  },\n  \"rows\":[\n    {\n      \"$typeName$\":\"\",\n      \"_col_1\":{\n        \"$typeName$\":\"Dimension\"\n      },\n      \"_col_0\"
 :{\n        \"id\":\"3328\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      }\n    },\n    {\n      \"$typeName$\":\"\",\n      \"_col_1\":{\n        \"$typeName$\":\"Dimension\"\n      },\n      \"_col_0\":{\n        \"id\":\"4864\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      }\n    },\n    {\n      \"$typeName$\":\"\",\n      \"_col_1\":{\n        \"$typeName$\":\"Dimension\"\n      },\n      \"_col_0\":{\n        \"id\":\"6656\",\n        \"$typeName$\":\"Table\",\n        \"version\":0\n      }\n    }\n  ]\n}")
+  }
+
+  test("testTrait") {
+    val r = QueryProcessor.evaluate(_trait("Dimension"), g)
+    validateJson(r)
+  }
+
+  test("testTraitInstance") {
+    val r = QueryProcessor.evaluate(_trait("Dimension").traitInstance(), g)
+    validateJson(r)
+  }
+
+  test("testInstanceAddedToFilter") {
+    val r = QueryProcessor.evaluate(_trait("Dimension").hasField("typeName"), g)
+    validateJson(r)
+  }
+
+  test("testInstanceFilter") {
+    val r = QueryProcessor.evaluate(_trait("Dimension").traitInstance().hasField("name"), g)
+    validateJson(r)
+  }
+
+  test("testLineageWithPath") {
+    val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable")).path(), g)
+    validateJson(r)
+  }
+
+  test("testLineageAllSelectWithPath") {
+    val r = QueryProcessor.evaluate(_class("Table").as("src").loop(id("LoadProcess").field("outputTable")).as("dest").
+      select(id("src").field("name").as("srcTable"), id("dest").field("name").as("destTable")).path(), g)
+    validateJson(r)
+  }
+
+  test("testLineageAllSelectWithPathFromParser") {
+    val p = new QueryParser
+    val e = p("Table as src loop (LoadProcess outputTable) as dest " +
+      "select src.name as srcTable, dest.name as destTable withPath").right.get
+    //Table as src loop (LoadProcess where LoadProcess.outputTable) as dest select src.name as srcTable, dest.name as destTable withPath
+    val r = QueryProcessor.evaluate(e, g)
+    validateJson(r)
+  }
+
+  test("testLineageAllSelectWithPathFromParser2") {
+    val p = new QueryParser
+
+    val e = p("Table as src loop (`LoadProcess->outputTable` inputTables) as dest " +
+      "select src.name as srcTable, dest.name as destTable withPath").right.get
+    val r = QueryProcessor.evaluate(e, g)
+    validateJson(r)
+  }
+
+  test("testHighLevelLineage") {
+        val r = HiveLineageQuery("Table", "sales_fact_monthly_mv",
+          "LoadProcess",
+          "inputTables",
+          "outputTable",
+        None, Some(List("name")), true, GraphPersistenceStrategy1, g).evaluate()
+    validateJson(r)
+  }
+
+  test("testHighLevelLineageReturnGraph") {
+    val r = HiveLineageQuery("Table", "sales_fact_monthly_mv",
+      "LoadProcess",
+      "inputTables",
+      "outputTable",
+      None, Some(List("name")), true, GraphPersistenceStrategy1, g).graph
+
+    println(r.toInstanceJson)
+    //validateJson(r)
+  }
+
+  test("testHighLevelWhereUsed") {
+    val r = HiveWhereUsedQuery("Table", "sales_fact",
+      "LoadProcess",
+      "inputTables",
+      "outputTable",
+      None, Some(List("name")), true, GraphPersistenceStrategy1, g).evaluate()
+    validateJson(r)
+  }
+
+  test("testHighLevelWhereUsedReturnGraph") {
+    val r = HiveWhereUsedQuery("Table", "sales_fact",
+      "LoadProcess",
+      "inputTables",
+      "outputTable",
+      None, Some(List("name")), true, GraphPersistenceStrategy1, g).graph
+    println(r.toInstanceJson)
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala b/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
new file mode 100755
index 0000000..71cbad3
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/HiveTitanSample.scala
@@ -0,0 +1,345 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import java.io.File
+import java.util.UUID
+import java.util.concurrent.atomic.AtomicInteger
+import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
+
+import com.thinkaurelius.titan.core.TitanGraph
+import com.typesafe.config.ConfigFactory
+import org.apache.commons.io.FileUtils
+
+import scala.collection.mutable.ArrayBuffer
+
+object HiveTitanSample {
+
+    private var nextVertexId: AtomicInteger = new AtomicInteger(0)
+    private var nextEdgeId: AtomicInteger = new AtomicInteger(1000)
+
+    trait Vertex {
+        val _id: String
+
+        def id = _id
+        val version = 0
+        val guid = s"""${UUID.randomUUID()}""".stripMargin
+
+        def addEdge(to: Vertex, label: String, edges: ArrayBuffer[String]): Unit = {
+            edges +=
+                s"""{"_id" : "${nextEdgeId.incrementAndGet()}", "_type" : "edge", "_inV" : "${to.id}", "_outV" : "$id", "_label" : "$label"}"""
+        }
+
+        def toGSon(vertices: ArrayBuffer[String],
+                   edges: ArrayBuffer[String]): Unit = {
+
+            val sb = new StringBuilder
+            sb.append( s"""{"typeName" : "${this.getClass.getSimpleName}", "_type" : "vertex"""")
+
+            this.getClass.getDeclaredFields filter (_.getName != "traits") foreach { f =>
+                f.setAccessible(true)
+                var fV = f.get(this)
+                fV = fV match {
+                    case _: String => s""""$fV""""
+                    case _ => fV
+                }
+
+                fV match {
+                    case x: Vertex => addEdge(x, s"${this.getClass.getSimpleName}.${f.getName}", edges)
+                    case l: List[_] => l.foreach(x => addEdge(x.asInstanceOf[Vertex],
+                        s"${this.getClass.getSimpleName}.${f.getName}", edges))
+                    case _ => sb.append( s""", "${f.getName}" : $fV""")
+                        sb.append( s""", "${this.getClass.getSimpleName}.${f.getName}" : $fV""")
+                }
+            }
+
+            this.getClass.getDeclaredFields filter (_.getName == "traits") foreach { f =>
+                f.setAccessible(true)
+                var traits = f.get(this).asInstanceOf[Option[List[Trait]]]
+
+                if (traits.isDefined) {
+                    val fV = traits.get.map(_.getClass.getSimpleName).mkString(",")
+                    sb.append( s""", "traitNames" : "$fV"""")
+                }
+            }
+
+            sb.append("}")
+            vertices += sb.toString()
+        }
+    }
+
+    trait Trait extends Vertex
+
+    trait Struct extends Vertex
+
+    trait Instance extends Vertex {
+        val traits: Option[List[Trait]]
+
+        override def toGSon(vertices: ArrayBuffer[String],
+                            edges: ArrayBuffer[String]): Unit = {
+            super.toGSon(vertices, edges)
+
+            if (traits.isDefined) {
+                traits.get foreach { t =>
+                    t.toGSon(vertices, edges)
+                    addEdge(t, s"${this.getClass.getSimpleName}.${t.getClass.getSimpleName}", edges)
+                }
+            }
+        }
+
+    }
+
+    case class JdbcAccess(_id: String = "" + nextVertexId.incrementAndGet()) extends Trait
+
+    case class PII(_id: String = "" + nextVertexId.incrementAndGet()) extends Trait
+
+    case class Dimension(_id: String = "" + nextVertexId.incrementAndGet()) extends Trait
+
+    case class Metric(_id: String = "" + nextVertexId.incrementAndGet()) extends Trait
+
+    case class ETL(_id: String = "" + nextVertexId.incrementAndGet()) extends Trait
+
+
+    case class DB(name: String, owner: String, createTime: Int, traits: Option[List[Trait]] = None,
+                  _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
+
+    case class StorageDescriptor(inputFormat: String, outputFormat: String,
+                                 _id: String = "" + nextVertexId.incrementAndGet()) extends Struct
+
+    case class Column(name: String, dataType: String, sd: StorageDescriptor,
+                      traits: Option[List[Trait]] = None,
+                      _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
+
+    case class Table(name: String, db: DB, sd: StorageDescriptor,
+                     traits: Option[List[Trait]] = None,
+                     _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
+
+    case class TableDef(name: String, db: DB, inputFormat: String, outputFormat: String,
+                        columns: List[(String, String, Option[List[Trait]])],
+                        traits: Option[List[Trait]] = None) {
+        val sd = StorageDescriptor(inputFormat, outputFormat)
+        val colDefs = columns map { c =>
+            Column(c._1, c._2, sd, c._3)
+        }
+        val tablDef = Table(name, db, sd, traits)
+
+        def toGSon(vertices: ArrayBuffer[String],
+                   edges: ArrayBuffer[String]): Unit = {
+            sd.toGSon(vertices, edges)
+            colDefs foreach {
+                _.toGSon(vertices, edges)
+            }
+            tablDef.toGSon(vertices, edges)
+        }
+    }
+
+    case class LoadProcess(name: String, inputTables: List[Vertex],
+                           outputTable: Vertex,
+                           traits: Option[List[Trait]] = None,
+                           _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
+
+    case class View(name: String, db: DB, inputTables: List[Vertex],
+                    traits: Option[List[Trait]] = None,
+                    _id: String = "" + nextVertexId.incrementAndGet()) extends Instance
+
+    val salesDB = DB("Sales", "John ETL", 1000)
+    val salesFact = TableDef("sales_fact",
+        salesDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("time_id", "int", None),
+            ("product_id", "int", None),
+            ("customer_id", "int", None),
+            ("sales", "double", Some(List(Metric())))
+        ))
+    val productDim = TableDef("product_dim",
+        salesDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("product_id", "int", None),
+            ("product_name", "string", None),
+            ("brand_name", "string", None)
+        ),
+        Some(List(Dimension())))
+    val timeDim = TableDef("time_dim",
+        salesDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("time_id", "int", None),
+            ("dayOfYear", "int", None),
+            ("weekDay", "string", None)
+        ),
+        Some(List(Dimension())))
+    val customerDim = TableDef("customer_dim",
+        salesDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("customer_id", "int", None),
+            ("name", "int", None),
+            ("address", "string", Some(List(PII())))
+        ),
+        Some(List(Dimension())))
+
+    val reportingDB = DB("Reporting", "Jane BI", 1500)
+    val salesFactDaily = TableDef("sales_fact_daily_mv",
+        reportingDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("time_id", "int", None),
+            ("product_id", "int", None),
+            ("customer_id", "int", None),
+            ("sales", "double", Some(List(Metric())))
+        ))
+    val loadSalesFactDaily = LoadProcess("loadSalesDaily",
+        List(salesFact.tablDef, timeDim.tablDef), salesFactDaily.tablDef,
+        Some(List(ETL())))
+
+
+    val productDimView = View("product_dim_view", reportingDB,
+        List(productDim.tablDef),
+        Some(List(Dimension(), JdbcAccess())))
+
+    val customerDimView = View("customer_dim_view", reportingDB,
+        List(customerDim.tablDef),
+        Some(List(Dimension(), JdbcAccess())))
+
+    val salesFactMonthly = TableDef("sales_fact_monthly_mv",
+        reportingDB,
+        "TextInputFormat",
+        "TextOutputFormat",
+        List(
+            ("time_id", "int", None),
+            ("product_id", "int", None),
+            ("customer_id", "int", None),
+            ("sales", "double", Some(List(Metric())))
+        ))
+    val loadSalesFactMonthly = LoadProcess("loadSalesMonthly",
+        List(salesFactDaily.tablDef), salesFactMonthly.tablDef,
+        Some(List(ETL())))
+
+
+    val vertices: ArrayBuffer[String] = new ArrayBuffer[String]()
+    val edges: ArrayBuffer[String] = new ArrayBuffer[String]()
+
+    salesDB.toGSon(vertices, edges)
+    salesFact.toGSon(vertices, edges)
+    productDim.toGSon(vertices, edges)
+    timeDim.toGSon(vertices, edges)
+    customerDim.toGSon(vertices, edges)
+
+    reportingDB.toGSon(vertices, edges)
+    salesFactDaily.toGSon(vertices, edges)
+    loadSalesFactDaily.toGSon(vertices, edges)
+    productDimView.toGSon(vertices, edges)
+    customerDimView.toGSon(vertices, edges)
+    salesFactMonthly.toGSon(vertices, edges)
+    loadSalesFactMonthly.toGSon(vertices, edges)
+
+    def toGSon(): String = {
+        s"""{
+        "mode":"NORMAL",
+        "vertices": ${vertices.mkString("[\n\t", ",\n\t", "\n]")},
+        "edges": ${edges.mkString("[\n\t", ",\n\t", "\n]")}
+        }
+        """.stripMargin
+    }
+
+    def writeGson(fileName: String): Unit = {
+        FileUtils.writeStringToFile(new File(fileName), toGSon())
+    }
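+
+    // Sketch of the GraphSON document toGSon() assembles (values are illustrative; the exact
+    // per-vertex fields come from Vertex.toGSon above, and the edge field names shown here
+    // follow the standard GraphSON layout and are assumed):
+    // {
+    //   "mode": "NORMAL",
+    //   "vertices": [ { "typeName": "DB", "name": "Sales", "_id": "1", ... }, ... ],
+    //   "edges":    [ { "_outV": "2", "_inV": "1", "_label": "Table.db", ... }, ... ]
+    // }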
+
+    val GremlinQueries = List(
+        // 1. List all DBs
+        """g.V.has("typeName", "DB")""",
+
+        // 2. List all DB names
+        """g.V.has("typeName", "DB").name""",
+
+        // 3. List all Tables in Reporting DB
+        """g.V.has("typeName", "DB").has("name", "Reporting").inE("Table.db").outV""",
+        """g.V.has("typeName", "DB").as("db").inE("Table.db").outV.and(_().back("db").has("name", "Reporting"))""",
+
+        // 4. List all Tables in Reporting DB, list as DB.name, Tbl.name
+        """
+    g.V.has("typeName", "DB").has("name", "Reporting").as("db").inE("Table.db").outV.as("tbl").select{it.name}{it.name}
+        """.stripMargin,
+
+        // 5. List all tables that are Dimensions and have the TextInputFormat
+        """
+    g.V.as("v").and(_().outE("Table.Dimension"), _().out("Table.sd").has("inputFormat", "TextInputFormat")).name
+        """.stripMargin,
+
+        // 6. List all tables that are Dimensions or have the TextInputFormat
+        """
+    g.V.as("v").or(_().outE("Table.Dimension"), _().out("Table.sd").has("inputFormat", "TextInputFormat")).name
+        """.stripMargin,
+
+        // 7. List tables that have at least 1 PII column
+        """
+    g.V.has("typeName", "Table").as("tab").out("Table.sd").in("Column.sd").as("column"). \
+      out("Column.PII").select.groupBy{it.getColumn("tab")}{it.getColumn("column")}{[ "c" : it.size]}.cap.scatter.filter{it.value.c > 0}. \
+      transform{it.key}.name  """.stripMargin
+
+        // 7.a from Table as tab -> g.V.has("typeName", "Table").as("tab")
+        // 7.b sd.Column as column -> out("Table.sd").in("Column.sd").as("column")
+        // 7.c is PII -> out("Column.PII")
+        // 7.d select tab, column -> select{it}{it}
+        // 7.e groupBy tab compute count(column) as c
+        // 7.f where c > 0
+
+        // 7.a Alias(Type("Table"), "tab")
+        // 7b. Field("sd", Alias(Type("Table"), "tab"))
+        //     Alias(Field("Column", Field("sd", Alias(Type("Table"), "tab"))), "column")
+        // 7.c Filter(is("PII"), Alias(Field("Column", Field("sd", Alias(Type("Table"), "tab"))), "column"))
+        // 7.d
+    )
+}
+
+object TestApp extends App with GraphUtils {
+
+    var conf = ConfigFactory.load()
+    conf = conf.getConfig("graphRepo")
+    val g: TitanGraph = titanGraph(conf)
+    val manager: ScriptEngineManager = new ScriptEngineManager
+    val engine: ScriptEngine = manager.getEngineByName("gremlin-groovy")
+    val bindings: Bindings = engine.createBindings
+    bindings.put("g", g)
+
+    val hiveGraphFile = FileUtils.getTempDirectory().getPath + File.separator + System.nanoTime() + ".gson"
+    HiveTitanSample.writeGson(hiveGraphFile)
+    bindings.put("hiveGraphFile", hiveGraphFile)
+
+    try {
+        engine.eval("g.loadGraphSON(hiveGraphFile)", bindings)
+
+        println(engine.eval("g.V.typeName.toList()", bindings))
+
+        HiveTitanSample.GremlinQueries.foreach { q =>
+            println(q)
+            println("Result: " + engine.eval(q + ".toList()", bindings))
+        }
+    } finally {
+        g.shutdown()
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala b/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
new file mode 100755
index 0000000..5abd797
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/LexerTest.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import org.junit.{Assert, Test}
+
+import scala.util.parsing.input.CharArrayReader
+
+class LexerTest {
+
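+    // scan() repeatedly applies the lexer's optional-whitespace + token parsers over the whole
+    // input and returns the ParseResult of the last token (a failure result if any token fails),
+    // e.g. (illustrative) scan(p, """Table isa Dimension select name""").successful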
+    def scan(p: QueryParser, str: String): p.lexical.ParseResult[_] = {
+        val l = p.lexical
+        var s: l.Input = new CharArrayReader(str.toCharArray)
+        var r = (l.whitespace.? ~ l.token)(s)
+        s = r.next
+
+        while (r.successful && !s.atEnd) {
+            s = r.next
+            if (!s.atEnd) {
+                r = (l.whitespace.? ~ l.token)(s)
+            }
+        }
+        r.asInstanceOf[p.lexical.ParseResult[_]]
+    }
+
+    @Test def testSimple: Unit = {
+        val p = new QueryParser
+        val r = scan(p, """DB where db1.name""")
+        Assert.assertTrue(r.successful)
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala b/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
new file mode 100755
index 0000000..58f3d98
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/LineageQueryTest.scala
@@ -0,0 +1,515 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import com.thinkaurelius.titan.core.TitanGraph
+import org.apache.atlas.query.Expressions._
+import org.apache.atlas.typesystem.types.TypeSystem
+import org.junit.runner.RunWith
+import org.scalatest.junit.JUnitRunner
+import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite}
+
+@RunWith(classOf[JUnitRunner])
+class LineageQueryTest extends FunSuite with BeforeAndAfterAll {
+
+    var g: TitanGraph = null
+
+    override def beforeAll() {
+        TypeSystem.getInstance().reset()
+        QueryTestsUtils.setupTypes
+        g = QueryTestsUtils.setupTestGraph
+    }
+
+    override def afterAll() {
+        g.shutdown()
+    }
+
+    val STRUCT_NAME_REGEX = (TypeUtils.TEMP_STRUCT_NAME_PREFIX + "\\d+").r
+    val PREFIX_SPACES_REGEX = ("\\n\\s*").r
+
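+    // Compares the query result JSON with the expected string after stripping the generated
+    // __tempQueryResultStructN names and the newline+indentation runs, so the expected strings
+    // below need not reproduce the generated struct names or formatting exactly.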
+    def validateJson(r: GremlinQueryResult, expected: String = null): Unit = {
+        val rJ = r.toJson
+        if (expected != null) {
+            var a = STRUCT_NAME_REGEX.replaceAllIn(rJ, "")
+            a = PREFIX_SPACES_REGEX.replaceAllIn(a, "")
+            var b = STRUCT_NAME_REGEX.replaceAllIn(expected, "")
+            b = PREFIX_SPACES_REGEX.replaceAllIn(b, "")
+            Assertions.assert(a == b)
+        } else {
+            println(rJ)
+        }
+    }
+
+    test("testInputTables") {
+        val r = QueryProcessor.evaluate(_class("LoadProcess").field("inputTables"), g)
+        val x = r.toJson
+        validateJson(r, """{
+  "query":"LoadProcess inputTables",
+  "dataType":{
+    "superTypes":[
+
+    ],
+    "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
+    "typeName":"Table",
+    "attributeDefinitions":[
+      {
+        "name":"name",
+        "dataTypeName":"string",
+        "multiplicity":{
+          "lower":0,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"db",
+        "dataTypeName":"DB",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"sd",
+        "dataTypeName":"StorageDesc",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      }
+    ]
+  },
+  "rows":[
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"2048",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"512",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"256",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"4864",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"3840",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"256",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"time_dim",
+      "$traits$":{
+        "Dimension":{
+          "$typeName$":"Dimension"
+        }
+      }
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"8960",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"7424",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_daily_mv"
+    }
+  ]
+}""")
+    }
+
+    test("testLoadProcessOut") {
+        val r = QueryProcessor.evaluate(_class("Table").field("LoadProcess").field("outputTable"), g)
+        validateJson(r, null)
+    }
+
+    test("testLineageAll") {
+        val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable")), g)
+        validateJson(r, """{
+  "query":"Table as _loop0 loop (LoadProcess outputTable)",
+  "dataType":{
+    "superTypes":[
+
+    ],
+    "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
+    "typeName":"Table",
+    "attributeDefinitions":[
+      {
+        "name":"name",
+        "dataTypeName":"string",
+        "multiplicity":{
+          "lower":0,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"db",
+        "dataTypeName":"DB",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"sd",
+        "dataTypeName":"StorageDesc",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      }
+    ]
+  },
+  "rows":[
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"8960",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"7424",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"12800",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"11264",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_monthly_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"8960",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"7424",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"12800",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"11264",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_monthly_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"12800",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"11264",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_monthly_mv"
+    }
+  ]
+}""")
+    }
+
+    test("testLineageAllSelect") {
+        val r = QueryProcessor.evaluate(_class("Table").as("src").loop(id("LoadProcess").field("outputTable")).as("dest").
+            select(id("src").field("name").as("srcTable"), id("dest").field("name").as("destTable")), g)
+        validateJson(r, """{
+  "query":"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as destTable",
+  "dataType":{
+    "typeName":"__tempQueryResultStruct2",
+    "attributeDefinitions":[
+      {
+        "name":"srcTable",
+        "dataTypeName":"string",
+        "multiplicity":{
+          "lower":0,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"destTable",
+        "dataTypeName":"string",
+        "multiplicity":{
+          "lower":0,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      }
+    ]
+  },
+  "rows":[
+    {
+      "$typeName$":"__tempQueryResultStruct2",
+      "srcTable":"sales_fact",
+      "destTable":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"__tempQueryResultStruct2",
+      "srcTable":"sales_fact",
+      "destTable":"sales_fact_monthly_mv"
+    },
+    {
+      "$typeName$":"__tempQueryResultStruct2",
+      "srcTable":"time_dim",
+      "destTable":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"__tempQueryResultStruct2",
+      "srcTable":"time_dim",
+      "destTable":"sales_fact_monthly_mv"
+    },
+    {
+      "$typeName$":"__tempQueryResultStruct2",
+      "srcTable":"sales_fact_daily_mv",
+      "destTable":"sales_fact_monthly_mv"
+    }
+  ]
+}""")
+    }
+
+    test("testLineageFixedDepth") {
+        val r = QueryProcessor.evaluate(_class("Table").loop(id("LoadProcess").field("outputTable"), int(1)), g)
+        validateJson(r, """{
+  "query":"Table as _loop0 loop (LoadProcess outputTable) times 1",
+  "dataType":{
+    "superTypes":[
+
+    ],
+    "hierarchicalMetaTypeName":"org.apache.atlas.typesystem.types.ClassType",
+    "typeName":"Table",
+    "attributeDefinitions":[
+      {
+        "name":"name",
+        "dataTypeName":"string",
+        "multiplicity":{
+          "lower":0,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"db",
+        "dataTypeName":"DB",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      },
+      {
+        "name":"sd",
+        "dataTypeName":"StorageDesc",
+        "multiplicity":{
+          "lower":1,
+          "upper":1,
+          "isUnique":false
+        },
+        "isComposite":false,
+        "isUnique":false,
+        "isIndexable":true,
+        "reverseAttributeName":null
+      }
+    ]
+  },
+  "rows":[
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"8960",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"7424",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"8960",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"7424",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_daily_mv"
+    },
+    {
+      "$typeName$":"Table",
+      "$id$":{
+        "id":"12800",
+        "$typeName$":"Table",
+        "version":0
+      },
+      "sd":{
+        "id":"11264",
+        "$typeName$":"StorageDesc",
+        "version":0
+      },
+      "db":{
+        "id":"7168",
+        "$typeName$":"DB",
+        "version":0
+      },
+      "name":"sales_fact_monthly_mv"
+    }
+  ]
+}""")
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala b/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
new file mode 100755
index 0000000..602b2b5
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/ParserTest.scala
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import org.apache.atlas.repository.BaseTest
+import org.junit.{Before, Test}
+
+
+class ParserTest extends BaseTest {
+
+    @Before
+    override def setup {
+        super.setup
+
+        QueryTestsUtils.setupTypes
+
+    }
+
+    @Test def testFrom: Unit = {
+        val p = new QueryParser
+        println(p("from DB").right.get.toString)
+    }
+
+    @Test def testFrom2: Unit = {
+        val p = new QueryParser
+        println(p("DB").right.get.toString)
+    }
+
+    @Test def testJoin1: Unit = {
+        val p = new QueryParser
+        println(p("DB, Table").right.get.toString)
+    }
+
+    @Test def testWhere1: Unit = {
+        val p = new QueryParser
+        println(p("DB as db1 Table where db1.name ").right.get.toString)
+    }
+
+    @Test def testWhere2: Unit = {
+        val p = new QueryParser
+        println(p("DB name = \"Reporting\"").right.get.toString)
+    }
+
+    @Test def testIsTrait: Unit = {
+        val p = new QueryParser
+        println(p("Table isa Dimension").right.get.toString)
+        println(p("Table is Dimension").right.get.toString)
+    }
+
+    @Test def test4: Unit = {
+        val p = new QueryParser
+        println(p("DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1").right.get.toString)
+    }
+
+    @Test def testJoin2: Unit = {
+        val p = new QueryParser
+        println(p("DB as db1 where (createTime + 1) > 0 and (db1.name = \"Reporting\") or DB has owner Table as tab " +
+            " select db1.name as dbName, tab.name as tabName").right.get.toString)
+    }
+
+    @Test def testLoop: Unit = {
+        val p = new QueryParser
+        println(p("Table loop (LoadProcess outputTable)").right.get.toString)
+    }
+
+    @Test def testNegInvalidateType: Unit = {
+        val p = new QueryParser
+        val x = p("from blah")
+        println(x.left)
+    }
+
+    @Test def testPath1: Unit = {
+        val p = new QueryParser
+        println(p("Table loop (LoadProcess outputTable) withPath").right.get.toString)
+    }
+
+    @Test def testPath2: Unit = {
+        val p = new QueryParser
+        println(p(
+            "Table as src loop (LoadProcess outputTable) as dest " +
+                "select src.name as srcTable, dest.name as destTable withPath").right.get.toString
+        )
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
----------------------------------------------------------------------
diff --git a/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala b/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
new file mode 100755
index 0000000..d79758a
--- /dev/null
+++ b/repository/src/test/scala/org/apache/atlas/query/QueryTestsUtils.scala
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.query
+
+import java.io.File
+import javax.script.{Bindings, ScriptEngine, ScriptEngineManager}
+
+import com.google.common.collect.ImmutableList
+import com.thinkaurelius.titan.core.{TitanFactory, TitanGraph}
+import com.tinkerpop.blueprints.Vertex
+import com.typesafe.config.{Config, ConfigFactory}
+import org.apache.atlas.typesystem.types._
+import org.apache.commons.configuration.{Configuration, ConfigurationException, MapConfiguration}
+import org.apache.commons.io.FileUtils
+import org.scalatest.{Assertions, BeforeAndAfterAll, FunSuite}
+
+trait GraphUtils {
+
+    import scala.collection.JavaConversions._
+
+    def getConfiguration(config: Config): Configuration = {
+        val keys = config.entrySet().map {
+            _.getKey
+        }
+        val gConfig: java.util.Map[String, String] = new java.util.HashMap[String, String]()
+        keys.foreach { k =>
+            gConfig.put(k, config.getString(k))
+        }
+        return new MapConfiguration(gConfig)
+    }
+
+
+    def titanGraph(conf: Config) = {
+        try {
+            val g = TitanFactory.open(getConfiguration(conf))
+            val mgmt = g.getManagementSystem
+            val typname = mgmt.makePropertyKey("typeName").dataType(classOf[String]).make()
+            mgmt.buildIndex("byTypeName", classOf[Vertex]).addKey(typname).buildCompositeIndex()
+            mgmt.commit()
+            g
+        } catch {
+            case e: ConfigurationException => throw new RuntimeException(e)
+        }
+    }
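+
+    // Minimal usage sketch (assumes the loaded config has a "graphRepo" section pointing at a
+    // test/in-memory Titan backend, as the tests below do):
+    //   val g = titanGraph(ConfigFactory.load().getConfig("graphRepo"))
+    //   g.query().has("typeName", "Table").vertices()
+    //   g.shutdown()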
+}
+
+object QueryTestsUtils extends GraphUtils {
+
+    def setupTypes: Unit = {
+        def attrDef(name: String, dT: IDataType[_],
+                    m: Multiplicity = Multiplicity.OPTIONAL,
+                    isComposite: Boolean = false,
+                    reverseAttributeName: String = null) = {
+            require(name != null)
+            require(dT != null)
+            new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
+        }
+
+        def dbClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "DB", null,
+            Array(
+                attrDef("name", DataTypes.STRING_TYPE),
+                attrDef("owner", DataTypes.STRING_TYPE),
+                attrDef("createTime", DataTypes.INT_TYPE)
+            ))
+
+        def storageDescClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "StorageDesc", null,
+            Array(
+                attrDef("inputFormat", DataTypes.STRING_TYPE),
+                attrDef("outputFormat", DataTypes.STRING_TYPE)
+            ))
+
+        def columnClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "Column", null,
+            Array(
+                attrDef("name", DataTypes.STRING_TYPE),
+                attrDef("dataType", DataTypes.STRING_TYPE),
+                new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null)
+            ))
+
+        def tblClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "Table", null,
+            Array(
+                attrDef("name", DataTypes.STRING_TYPE),
+                new AttributeDefinition("db", "DB", Multiplicity.REQUIRED, false, null),
+                new AttributeDefinition("sd", "StorageDesc", Multiplicity.REQUIRED, false, null)
+            ))
+
+        def loadProcessClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "LoadProcess", null,
+            Array(
+                attrDef("name", DataTypes.STRING_TYPE),
+                new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null),
+                new AttributeDefinition("outputTable", "Table", Multiplicity.REQUIRED, false, null)
+            ))
+
+        def viewClsDef = new HierarchicalTypeDefinition[ClassType](classOf[ClassType], "View", null,
+            Array(
+                attrDef("name", DataTypes.STRING_TYPE),
+                new AttributeDefinition("inputTables", DataTypes.arrayTypeName("Table"), Multiplicity.COLLECTION, false, null)
+            ))
+
+        def dimTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Dimension", null,
+            Array[AttributeDefinition]())
+        def piiTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "PII", null,
+            Array[AttributeDefinition]())
+        def metricTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "Metric", null,
+            Array[AttributeDefinition]())
+        def etlTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "ETL", null,
+            Array[AttributeDefinition]())
+        def jdbcTraitDef = new HierarchicalTypeDefinition[TraitType](classOf[TraitType], "JdbcAccess", null,
+            Array[AttributeDefinition]())
+
+        TypeSystem.getInstance().defineTypes(ImmutableList.of[StructTypeDefinition],
+            ImmutableList.of[HierarchicalTypeDefinition[TraitType]](dimTraitDef, piiTraitDef,
+                metricTraitDef, etlTraitDef, jdbcTraitDef),
+            ImmutableList.of[HierarchicalTypeDefinition[ClassType]](dbClsDef, storageDescClsDef, columnClsDef, tblClsDef,
+                loadProcessClsDef, viewClsDef))
+
+        ()
+    }
+
+    def setupTestGraph: TitanGraph = {
+        var conf = ConfigFactory.load()
+        conf = conf.getConfig("graphRepo")
+        val g = titanGraph(conf)
+        val manager: ScriptEngineManager = new ScriptEngineManager
+        val engine: ScriptEngine = manager.getEngineByName("gremlin-groovy")
+        val bindings: Bindings = engine.createBindings
+        bindings.put("g", g)
+
+        val hiveGraphFile = FileUtils.getTempDirectory().getPath + File.separator + System.nanoTime() + ".gson"
+        HiveTitanSample.writeGson(hiveGraphFile)
+        bindings.put("hiveGraphFile", hiveGraphFile)
+
+        engine.eval("g.loadGraphSON(hiveGraphFile)", bindings)
+        g
+    }
+
+}
+
+trait BaseGremlinTest {
+  self : FunSuite with BeforeAndAfterAll =>
+
+  val STRUCT_NAME_REGEX = (TypeUtils.TEMP_STRUCT_NAME_PREFIX + "\\d+").r
+  def validateJson(r: GremlinQueryResult, expected: String = null): Unit = {
+    val rJ = r.toJson
+    if (expected != null) {
+      val a = STRUCT_NAME_REGEX.replaceAllIn(rJ, "")
+      val b = STRUCT_NAME_REGEX.replaceAllIn(expected, "")
+      Assertions.assert(a == b)
+    } else {
+      println(rJ)
+    }
+  }
+
+}


