atlas-commits mailing list archives

From jma...@apache.org
Subject [22/51] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date Sun, 14 Jun 2015 17:45:02 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/atlas/tools/thrift/ThriftTypesGen.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/thrift/ThriftTypesGen.scala b/tools/src/main/scala/org/apache/atlas/tools/thrift/ThriftTypesGen.scala
new file mode 100755
index 0000000..c020d46
--- /dev/null
+++ b/tools/src/main/scala/org/apache/atlas/tools/thrift/ThriftTypesGen.scala
@@ -0,0 +1,316 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.tools.thrift
+
+import com.google.common.collect.ImmutableList
+import org.apache.atlas.MetadataException
+import org.apache.atlas.typesystem.TypesDef
+import org.apache.atlas.typesystem.types.{DataTypes, HierarchicalTypeDefinition, Multiplicity, TraitType, _}
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.io.Source
+import scala.util.{Failure, Success, Try}
+
+
+case class CompositeRelation(typeName: String, fieldName: String, reverseFieldName: Option[String])
+
+/**
+ * Convert a [[ThriftDef ThriftDef]] to
+ * [[TypesDef TypesDef]]. Currently there are several restrictions:
+ *
+ * - CppIncludes and SEnums are not allowed.
+ * - The only include allowed is "share/fb303/if/fb303.thrift", and it is ignored.
+ *   Any other include triggers an exception.
+ * - Namespaces, TypeDefs, Constants, Unions, Exceptions, and Service definitions are ignored.
+ * - Consequently, typedefs are not applied to fields.
+ * - Field constant values are ignored.
+ * - Type annotations and XSD information are ignored.
+ *
+ * Thrift Structs can be mapped to Structs, Traits, or Classes. The caller specifies the mapping by
+ * providing the structNames, classNames and traitNames parameters. A Struct that is not in one of these 3
+ * lists is not mapped.
+ *
+ * The ThriftDef doesn't specify whether a relationship is composite. For example, in the thrift definition
+ * {{{
+ *     struct Person {
+ *       1: string    name,
+ *       2: Address   addr,
+ *     }
+ *     struct Address {
+ *       1: string    street,
+ *       2: string    city,
+ *     }
+ * }}}
+ *
+ * If Person and Address are mapped to classes, you may want to make Person -> Address a composite
+ * relation. The caller can specify such relations via the 'compositeRelations' parameter; see the example below.
+ *
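+ * @example A possible usage sketch (the type names and thrift resource path below are
+ *          illustrative, not part of this commit):
+ * {{{
+ *   val gen = new ThriftTypesGen(
+ *       structNames = List(),
+ *       classNames = List("Person", "Address"),
+ *       traitNames = List(),
+ *       compositeRelations = List(CompositeRelation("Person", "addr", None)))
+ *   // hypothetical thrift resource available on the classpath
+ *   val typesDef: TypesDef = gen("/test.thrift")
+ * }}}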
+ */
+class ThriftTypesGen(val structNames: List[String], val classNames: List[String], val traitNames: List[String],
+                     val compositeRelations: List[CompositeRelation]) {
+
+    private val LOG: Logger = LoggerFactory.getLogger(classOf[ThriftTypesGen])
+    private val FB_INCLUDE = "share/fb303/if/fb303.thrift"
+    /**
+     * For a (typeName, fieldName) key, holds (isComposite, reverseName).
+     * If no entry exists, the field is not composite.
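+     * A hypothetical entry (names are illustrative):
+     * {{{ ("Person", "addr") -> (true, Some("person")) }}}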
+     */
+    private var compositeRelsMap: Map[(String, String), (Boolean, Option[String])] = Map()
+
+    def apply(thriftResource: String): TypesDef = {
+        val tDef = parseThrift(thriftResource)
+
+        tDef.flatMap(buildCompositeRelations).flatMap(typesDef) match {
+            case Success(t) => t
+            case Failure(v) => throw v
+        }
+
+    }
+
+    def buildCompositeRelations(thriftDef: ThriftDef): Try[ThriftDef] = Try {
+
+        compositeRelations.foreach { cr =>
+
+            val sDef = thriftDef.structs.find(_.name == cr.typeName)
+            if (!sDef.isDefined) {
+                throw new MetadataException(s"Unknown Struct (${cr.typeName}) specified in CompositeRelation")
+
+            }
+            val fDef = sDef.get.fields.find(_.name == cr.fieldName)
+            if (!fDef.isDefined) {
+                throw new MetadataException(s"Unknown Field (${cr.fieldName}) specified in CompositeRelation")
+
+            }
+
+            compositeRelsMap = compositeRelsMap + ((cr.typeName, cr.fieldName) ->(true, cr.reverseFieldName))
+
+            if (cr.reverseFieldName.isDefined) {
+                val reverseStructName = dataTypeName(fDef.get.fieldType)
+                val reverseStructDef = thriftDef.structs.find(_.name == reverseStructName)
+                if (!reverseStructDef.isDefined) {
+                    throw new MetadataException(s"Cannot find Struct $reverseStructName in CompositeRelation $cr")
+                }
+                val rfDef = reverseStructDef.get.fields.find(_.name == cr.reverseFieldName.get)
+                if (!rfDef.isDefined) {
+                    throw new MetadataException(s"Unknown Reverse Field (${cr.reverseFieldName.get}) specified in CompositeRelation")
+                }
+
+                compositeRelsMap = compositeRelsMap +
+                    ((reverseStructName, cr.reverseFieldName.get) ->(false, Some(cr.fieldName)))
+            }
+        }
+
+        thriftDef
+    }
+
+    def typesDef(thriftDef: ThriftDef): Try[TypesDef] = {
+        var tDef: Try[TypesDef] = Try {
+            TypesDef(Seq(), Seq(), Seq(), Seq())
+        }
+
+        tDef = tDef.flatMap((t: TypesDef) => includes(t, thriftDef.includes)).
+            flatMap((t: TypesDef) => cppIncludes(t, thriftDef.cppIncludes)).
+            flatMap((t: TypesDef) => namespaces(t, thriftDef.namespaces)).
+            flatMap((t: TypesDef) => constants(t, thriftDef.constants)).
+            flatMap((t: TypesDef) => senums(t, thriftDef.senums)).
+            flatMap((t: TypesDef) => enums(t, thriftDef.enums)).
+            flatMap((t: TypesDef) => structs(t, thriftDef.structs)).
+            flatMap((t: TypesDef) => unions(t, thriftDef.unions)).
+            flatMap((t: TypesDef) => exceptions(t, thriftDef.xceptions)).
+            flatMap((t: TypesDef) => services(t, thriftDef.services))
+
+
+        tDef
+    }
+
+    private def parseThrift(thriftResource: String): Try[ThriftDef] = {
+        Try {
+            LOG.debug("Parsing Thrift resource {}", thriftResource)
+            val is = getClass().getResourceAsStream(thriftResource)
+            val src: Source = Source.fromInputStream(is)
+            val thriftStr: String = src.getLines().mkString("\n")
+            val p = new ThriftParser
+            var thriftDef: Option[ThriftDef] = p(thriftStr)
+            thriftDef match {
+                case Some(s) => s
+                case None => {
+                    LOG.debug("Parse for thrift resource {} failed", thriftResource)
+                    throw new MetadataException(s"Failed to parse thrift resource: $thriftResource")
+                }
+            }
+        }
+    }
+
+    @throws[MetadataException]
+    private def dataTypeName(fT: FieldType): String = fT match {
+        case IdentifierType(n) => n
+        case BaseType(typ, _) => BASE_TYPES.toPrimitiveTypeName(typ)
+        case ListType(elemType, _, _) => DataTypes.arrayTypeName(dataTypeName(elemType))
+        case SetType(elemType, _, _) => DataTypes.arrayTypeName(dataTypeName(elemType))
+        case MapType(keyType, valueType, _, _) => DataTypes.mapTypeName(dataTypeName(keyType), dataTypeName(valueType))
+    }
+
+    private def enumValue(e: EnumValueDef, defId: Int): EnumValue = e match {
+        case EnumValueDef(value, Some(id), _) => new EnumValue(value, id.value)
+        case EnumValueDef(value, None, _) => new EnumValue(value, defId)
+    }
+
+    private def enumDef(td: TypesDef, e: EnumDef): Try[TypesDef] = {
+        Success(
+            td.copy(enumTypes = td.enumTypes :+
+                new EnumTypeDefinition(e.name, e.enumValues.zipWithIndex.map(t => enumValue(t._1, -t._2)): _*))
+        )
+    }
+
+    private def includeDef(td: TypesDef, i: IncludeDef): Try[TypesDef] = {
+        Try {
+            if (i.value != FB_INCLUDE) {
+                throw new MetadataException(s"Unsupported Include ${i.value}, only fb303.thrift is currently allowed.")
+            }
+            td
+        }
+    }
+
+    private def cppIncludeDef(td: TypesDef, i: CppIncludeDef): Try[TypesDef] = {
+        Try {
+            throw new MetadataException(s"Unsupported CppInclude ${i.value}.")
+        }
+    }
+
+    private def namespaceDef(td: TypesDef, i: NamespaceDef): Try[TypesDef] = {
+        Try {
+            LOG.debug(s"Ignoring Namespace definition $i")
+            td
+        }
+    }
+
+    private def constantDef(td: TypesDef, i: ConstDef): Try[TypesDef] = {
+        Try {
+            LOG.debug(s"Ignoring ConstantDef definition $i")
+            td
+        }
+    }
+
+    private def senumDef(td: TypesDef, i: SEnumDef): Try[TypesDef] = {
+        Try {
+            throw new MetadataException(s"Unsupported SEnums ${i}.")
+        }
+    }
+
+    private def fieldDef(typName: String, fd: FieldDef): AttributeDefinition = {
+        val name: String = fd.name
+        val dTName: String = dataTypeName(fd.fieldType)
+
+        var m: Multiplicity = Multiplicity.OPTIONAL
+
+        if (fd.requiredNess) {
+            m = Multiplicity.REQUIRED
+        }
+
+        fd.fieldType match {
+            case _: ListType => m = Multiplicity.COLLECTION
+            case _: SetType => m = Multiplicity.SET
+            case _ => ()
+        }
+
+        var isComposite = false
+        var reverseAttrName: String = null
+
+        val r = compositeRelsMap.get((typName, name))
+        if (r.isDefined) {
+            isComposite = r.get._1
+            if (r.get._2.isDefined) {
+                reverseAttrName = r.get._2.get
+            }
+        }
+        new AttributeDefinition(name, dTName, m, isComposite, reverseAttrName)
+    }
+
+    private def structDef(td: TypesDef, structDef: StructDef): Try[TypesDef] = Try {
+        val typeName: String = structDef.name
+
+        typeName match {
+            case t if structNames contains t => td.copy(structTypes = td.structTypes :+
+                new StructTypeDefinition(typeName, structDef.fields.map(fieldDef(typeName, _)).toArray))
+            case t: String if traitNames contains t => {
+                val ts = td.traitTypes :+
+                    new HierarchicalTypeDefinition[TraitType](classOf[TraitType],
+                        typeName, ImmutableList.of[String](), structDef.fields.map(fieldDef(typeName, _)).toArray)
+                td.copy(traitTypes = ts)
+            }
+            case t: String if classNames contains t => {
+                val cs = td.classTypes :+
+                    new HierarchicalTypeDefinition[ClassType](classOf[ClassType],
+                        typeName, ImmutableList.of[String](), structDef.fields.map(fieldDef(typeName, _)).toArray)
+                td.copy(classTypes = cs)
+            }
+            case _ => td
+        }
+    }
+
+    private def unionDef(td: TypesDef, i: UnionDef): Try[TypesDef] = {
+        Try {
+            LOG.debug(s"Ignoring Union definition $i")
+            td
+        }
+    }
+
+    private def exceptionDef(td: TypesDef, i: ExceptionDef): Try[TypesDef] = {
+        Try {
+            LOG.debug(s"Ignoring Exception definition $i")
+            td
+        }
+    }
+
+    private def serviceDef(td: TypesDef, i: ServiceDef): Try[TypesDef] = {
+        Try {
+            LOG.debug(s"Ignoring Service definition $i")
+            td
+        }
+    }
+
+    private def applyList[T](fn: (TypesDef, T) => Try[TypesDef])(td: TypesDef, l: List[T]): Try[TypesDef] = {
+        // thread the accumulated TypesDef through every element of the list
+        l.foldLeft[Try[TypesDef]](Success(td))((b, a) => b.flatMap(t => fn(t, a)))
+    }
+
+    private def includes = applyList(includeDef) _
+
+    private def cppIncludes = applyList(cppIncludeDef) _
+
+    private def namespaces = applyList(namespaceDef) _
+
+    private def constants = applyList(constantDef) _
+
+    private def enums = applyList(enumDef) _
+
+    private def senums = applyList(senumDef) _
+
+    private def structs = applyList(structDef) _
+
+    private def unions = applyList(unionDef) _
+
+    private def exceptions = applyList(exceptionDef) _
+
+    private def services = applyList(serviceDef) _
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/DynamicTypedStruct.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/DynamicTypedStruct.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/DynamicTypedStruct.scala
deleted file mode 100755
index 2670321..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/DynamicTypedStruct.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.dsl
-
-import org.apache.hadoop.metadata.typesystem.ITypedStruct
-import org.apache.hadoop.metadata.typesystem.types.{StructType, TypeSystem}
-
-import scala.language.dynamics
-
-class DynamicTypedStruct(val ts: ITypedStruct) extends Dynamic {
-    def selectDynamic(name: String) = ts.get(name)
-
-    def updateDynamic(name: String)(value: Any) {
-        var value1 = value
-        if (value != null && value.isInstanceOf[DynamicTypedStruct]) {
-            value1 = value.asInstanceOf[DynamicTypedStruct].ts
-        }
-        ts.set(name, value1)
-    }
-
-    def dataType = TypeSystem.getInstance().getDataType(classOf[StructType], ts.getTypeName)
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/package.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/package.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/package.scala
deleted file mode 100755
index ba09342..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/dsl/package.scala
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata
-
-import java.text.SimpleDateFormat
-
-import org.apache.hadoop.metadata.typesystem.json.{BigDecimalSerializer, BigIntegerSerializer, Serialization, TypedStructSerializer}
-import org.apache.hadoop.metadata.repository.memory.MemRepository
-import org.apache.hadoop.metadata.tools.dsl.DynamicTypedStruct
-import org.apache.hadoop.metadata.typesystem.persistence.StructInstance
-import org.apache.hadoop.metadata.typesystem.types._
-import org.apache.hadoop.metadata.typesystem.{IStruct, ITypedStruct}
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.{write => swrite}
-
-import scala.language.implicitConversions
-import scala.collection.JavaConversions._
-
-package object dsl {
-
-    val defFormat = new DefaultFormats {
-        override protected def dateFormatter = TypeSystem.getInstance().getDateFormat;
-
-        override val typeHints = NoTypeHints
-    }
-
-    implicit val formats = defFormat + new TypedStructSerializer +
-        new BigDecimalSerializer + new BigIntegerSerializer
-    val BOOLEAN_TYPE = DataTypes.BOOLEAN_TYPE
-    val BYTE_TYPE = DataTypes.BYTE_TYPE
-    val SHORT_TYPE = DataTypes.SHORT_TYPE
-    val INT_TYPE = DataTypes.INT_TYPE
-    val LONG_TYPE = DataTypes.LONG_TYPE
-    val FLOAT_TYPE = DataTypes.FLOAT_TYPE
-    val DOUBLE_TYPE = DataTypes.DOUBLE_TYPE
-    val BIGINT_TYPE = DataTypes.BIGINTEGER_TYPE
-    val BIGDECIMAL_TYPE = DataTypes.BIGDECIMAL_TYPE
-    val DATE_TYPE = DataTypes.DATE_TYPE
-    val STRING_TYPE = DataTypes.STRING_TYPE
-    val ATTR_OPTIONAL = Multiplicity.OPTIONAL
-    val ATTR_REQUIRED = Multiplicity.REQUIRED
-
-    def repo = new MemRepository(ts)
-
-    def arrayType(dT: IDataType[_]) = ts.defineArrayType(dT)
-
-    def mapType(kT: IDataType[_], vT: IDataType[_]) = ts.defineMapType(kT, vT)
-
-    def attrDef(name: String, dT: IDataType[_],
-                m: Multiplicity = Multiplicity.OPTIONAL,
-                isComposite: Boolean = false,
-                reverseAttributeName: String = null) = {
-        require(name != null)
-        require(dT != null)
-        new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
-    }
-
-    def listTypes = (ts.getTypeNames -- ts.getCoreTypes).sorted.toList.mkString("[", ",", "]")
-
-    def ts = TypeSystem.getInstance
-
-    def defineStructType(name: String, attrDef: AttributeDefinition*) = {
-        require(name != null)
-        ts.defineStructType(name, false, attrDef: _*)
-    }
-
-    def createInstance(typeName: String, jsonStr: String)(implicit formats: Formats) = {
-        val j = parse(jsonStr)
-        assert(j.isInstanceOf[JObject])
-        var j1 = j.asInstanceOf[JObject]
-        j1 = JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) :: j1.obj)
-        new DynamicTypedStruct(Extraction.extract[StructInstance](j1))
-    }
-
-    def createInstance(typeName: String) = {
-        new DynamicTypedStruct(
-            ts.getDataType(classOf[StructType], typeName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]].createInstance())
-    }
-
-    implicit def dynTypedStructToTypedStruct(s: DynamicTypedStruct) = s.ts
-
-    implicit def dynTypedStructToJson(s: DynamicTypedStruct)(implicit formats: Formats) = {
-        Extraction.decompose(s.ts)(formats)
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/Main.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/Main.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/Main.scala
deleted file mode 100755
index c95ca99..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/Main.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.simpleserver
-
-import akka.actor.{ActorSystem, Props}
-import akka.io.IO
-import com.typesafe.config.ConfigFactory
-import org.apache.hadoop.metadata.repository.memory.MemRepository
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem
-import spray.can.Http
-
-/**
- * A Simple Spray based server to test the TypeSystem and MemRepository.
- *
- * @example {{{
- *              -- Using the [[ https://github.com/jakubroztocil/httpie Httpie tool]]
- *
- *              http GET localhost:9140/listTypeNames
- *              pbpaste | http PUT localhost:9140/defineTypes
- *              http GET localhost:9140/typeDetails typeNames:='["Department", "Person", "Manager"]'
- *
- *              pbpaste | http PUT localhost:9140/createInstance
- *              pbpaste | http GET localhost:9140/getInstance
- *          }}}
- *
- *          - On the Mac, pbpaste makes available what is copied to clipboard. Copy contents of resources/sampleTypes.json
- *          - for createInstance resources/sampleInstance.json is an example
- *          - for getInstance send an Id back, you can copy the output from createInstance.
- *
- */
-object Main extends App {
-    val config = ConfigFactory.load()
-    val host = config.getString("http.host")
-    val port = config.getInt("http.port")
-
-    implicit val system = ActorSystem("metadataservice")
-
-    val typSys = TypeSystem.getInstance()
-    val memRepo = new MemRepository(typSys)
-
-    val api = system.actorOf(Props(new RestInterface(typSys, memRepo)), "httpInterface")
-    IO(Http) ! Http.Bind(listener = api, interface = host, port = port)
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/MetadataActor.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/MetadataActor.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/MetadataActor.scala
deleted file mode 100755
index 6878411..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/MetadataActor.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.simpleserver
-
-import akka.actor._
-import akka.util.Timeout
-import com.google.common.collect.ImmutableList
-import org.apache.hadoop.metadata.repository.memory.MemRepository
-import org.apache.hadoop.metadata.typesystem.json._
-import org.apache.hadoop.metadata.typesystem.persistence.Id
-import org.apache.hadoop.metadata.typesystem.types._
-import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, TypesDef}
-import org.json4s.{Formats, NoTypeHints}
-import spray.httpx.Json4sSupport
-
-import scala.concurrent.duration._
-
-
-class MetadataActor(val typeSystem: TypeSystem, val memRepository: MemRepository) extends Actor with ActorLogging {
-
-  import org.apache.hadoop.metadata.tools.simpleserver.MetadataProtocol._
-
-  import scala.collection.JavaConversions._
-  import scala.language.postfixOps
-    implicit val timeout = Timeout(5 seconds)
-
-
-    def receive = {
-        case ListTypeNames() =>
-            sender ! TypeNames(typeSystem.getTypeNames.toList)
-
-        case GetTypeDetails(typeNames) =>
-            val typesDef = TypesSerialization.convertToTypesDef(typeSystem, (d: IDataType[_]) => typeNames.contains(d.getName))
-            sender ! TypeDetails(typesDef)
-
-        case DefineTypes(typesDef: TypesDef) =>
-            typesDef.enumTypes.foreach(typeSystem.defineEnumType(_))
-
-            typeSystem.defineTypes(ImmutableList.copyOf(typesDef.structTypes.toArray),
-                ImmutableList.copyOf(typesDef.traitTypes.toArray),
-                ImmutableList.copyOf(typesDef.classTypes.toArray))
-
-            var newTypes: List[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]] = Nil
-            typesDef.traitTypes.foreach { tDef =>
-                val nm = tDef.typeName
-                newTypes = newTypes :+
-                    typeSystem.getDataType(classOf[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]], nm)
-            }
-            typesDef.classTypes.foreach { tDef =>
-                val nm = tDef.typeName
-                newTypes = newTypes :+
-                    typeSystem.getDataType(classOf[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]], nm)
-            }
-
-            memRepository.defineTypes(newTypes)
-
-            sender ! TypesCreated
-
-        case CreateInstance(i) =>
-            val r = memRepository.create(i)
-            sender ! InstanceCreated(r.getId)
-
-        case GetInstance(id) =>
-            val r = memRepository.get(id)
-            sender ! InstanceDetails(r)
-    }
-
-}
-
-object MetadataProtocol {
-
-    case class ListTypeNames()
-
-    case class TypeNames(typeNames: List[String])
-
-    case class GetTypeDetails(typeNames: List[String])
-
-    case class TypeDetails(types: TypesDef)
-
-    case class DefineTypes(types: TypesDef)
-
-    case class TypesCreated()
-
-    case class CreateInstance(i: ITypedReferenceableInstance)
-
-    case class InstanceCreated(id: Id)
-
-    case class GetInstance(id: Id)
-
-    case class InstanceDetails(i: ITypedReferenceableInstance)
-
-}
-
-
-trait Json4sProtocol extends Json4sSupport {
-    val typeSystem: TypeSystem
-    val memRepository: MemRepository
-
-    implicit def json4sFormats: Formats =
-        org.json4s.native.Serialization.formats(NoTypeHints) + new MultiplicitySerializer +
-            new TypedStructSerializer +
-            new TypedReferenceableInstanceSerializer +
-            new BigDecimalSerializer + new BigIntegerSerializer + new IdSerializer
-}
-

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/RestInterface.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/RestInterface.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/RestInterface.scala
deleted file mode 100755
index b7f68fa..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/simpleserver/RestInterface.scala
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.simpleserver
-
-import akka.actor._
-import akka.util.Timeout
-import org.apache.hadoop.metadata.repository.memory.MemRepository;
-import org.apache.hadoop.metadata.typesystem.{TypesDef, ITypedReferenceableInstance}
-import org.apache.hadoop.metadata.typesystem.persistence.Id
-import org.apache.hadoop.metadata.typesystem.types.TypeSystem
-import spray.http.StatusCodes
-import spray.routing._
-import scala.concurrent.duration._
-
-class Responder(val typeSystem: TypeSystem, val memRepository : MemRepository,
-                requestContext:RequestContext, mdSvc:ActorRef) extends Actor with Json4sProtocol with ActorLogging {
-    import org.apache.hadoop.metadata.tools.simpleserver.MetadataProtocol._
-
-    def receive = {
-
-        case typNames:TypeNames =>
-            requestContext.complete(StatusCodes.OK, typNames)
-            self ! PoisonPill
-
-        case tD:TypeDetails =>
-            requestContext.complete(StatusCodes.OK, tD)
-            self ! PoisonPill
-
-        case TypesCreated =>
-            requestContext.complete(StatusCodes.OK)
-            self ! PoisonPill
-
-        case InstanceCreated(id) =>
-            requestContext.complete(StatusCodes.OK, id)
-
-        case InstanceDetails(i) =>
-            requestContext.complete(StatusCodes.OK, i)
-    }
-}
-
-class RestInterface(val typeSystem: TypeSystem, val memRepository : MemRepository) extends HttpServiceActor
-with RestApi {
-    def receive = runRoute(routes)
-}
-
-
-trait RestApi extends HttpService with Json4sProtocol with ActorLogging { actor: Actor =>
-    import MetadataProtocol._
-    import scala.language.postfixOps
-    import scala.concurrent.ExecutionContext.Implicits.global
-
-    val typeSystem : TypeSystem
-    val memRepository : MemRepository
-
-    implicit val timeout = Timeout(10 seconds)
-
-    import akka.pattern.{ask, pipe}
-
-    val mdSvc = context.actorOf(Props(new MetadataActor(typeSystem, memRepository)))
-
-    def routes: Route =
-
-        path("listTypeNames") {
-            get { requestContext =>
-                val responder: ActorRef = createResponder(requestContext)
-
-                pipe(mdSvc.ask(ListTypeNames))
-
-                mdSvc.ask(ListTypeNames()).pipeTo(responder)
-            }
-        } ~
-            path("typeDetails") {
-                get {
-                    entity(as[GetTypeDetails]) { typeDetails => requestContext =>
-                        val responder = createResponder(requestContext)
-                        mdSvc.ask(typeDetails).pipeTo(responder)
-                    }
-                }
-            } ~
-            path("defineTypes") {
-                put {
-                    entity(as[TypesDef]) { typesDef => requestContext =>
-                        val responder = createResponder(requestContext)
-                        mdSvc.ask(DefineTypes(typesDef)).pipeTo(responder)
-                    }
-                }
-            } ~
-            path("createInstance") {
-                put {
-                    entity(as[ITypedReferenceableInstance]) { i => requestContext =>
-                        val responder = createResponder(requestContext)
-                        mdSvc.ask(CreateInstance(i)).pipeTo(responder)
-                    }
-                }
-            } ~
-            path("getInstance") {
-                get {
-                    entity(as[Id]) { id => requestContext =>
-                        val responder = createResponder(requestContext)
-                        mdSvc.ask(GetInstance(id)).pipeTo(responder)
-                    }
-                }
-            }
-
-    def createResponder(requestContext:RequestContext) = {
-        context.actorOf(Props(new Responder(typeSystem, memRepository, requestContext, mdSvc)))
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftParser.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftParser.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftParser.scala
deleted file mode 100755
index fa0ea1e..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftParser.scala
+++ /dev/null
@@ -1,665 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.thrift
-
-import org.apache.hadoop.metadata.MetadataException
-import org.apache.hadoop.metadata.typesystem.types.DataTypes
-
-import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
-import scala.util.parsing.combinator.lexical.StdLexical
-import scala.util.parsing.combinator.syntactical.StandardTokenParsers
-
-import scala.util.parsing.input.CharArrayReader._
-
-object BASE_TYPES extends Enumeration {
-    val STRING = Value("string")
-    val BINARY = Value("binary")
-    val SLIST = Value("slist")
-    val BOOLEAN = Value("bool")
-    val BYTE = Value("byte")
-    val I16 = Value("i16")
-    val I32 = Value("i32")
-    val I64 = Value("i64")
-    val DOUBLE = Value("double")
-
-    @throws[MetadataException]
-    def toPrimitiveTypeName(t : BASE_TYPES.Value) : String = t match {
-        case STRING => DataTypes.STRING_TYPE.getName
-        case SLIST => DataTypes.STRING_TYPE.getName
-        case BOOLEAN => DataTypes.BOOLEAN_TYPE.getName
-        case BYTE => DataTypes.BYTE_TYPE.getName
-        case I16 => DataTypes.SHORT_TYPE.getName
-        case I32 => DataTypes.INT_TYPE.getName
-        case I64 => DataTypes.LONG_TYPE.getName
-        case DOUBLE => DataTypes.DOUBLE_TYPE.getName
-        case _ => throw new MetadataException(s"Thrift BaseType ($t) not supported")
-    }
-}
-
-object THRIFT_LANG extends Enumeration {
-    val CPP       = Value("cpp")
-    val PHP       = Value("php")
-    val PY        = Value("py")
-    val PERL      = Value("perl")
-    val RUBY      = Value("ruby")
-    val SMLTK_CAT = Value("smalltalk.category")
-    val SMLTK_PRE = Value("smalltalk.prefix")
-    val JAVA      = Value("java")
-    val COCOA     = Value("cocoa")
-    val XSD       = Value("xsd")
-    val CSHARP    = Value("csharp")
-    val STAR      = Value("*")
-    val OTHER     = Value("")
-}
-
-case class TypeAnnotation(name : String, value : String)
-case class CPPType(name : String)
-sealed trait FunctionType
-case class VoidType() extends FunctionType
-sealed trait FieldType extends FunctionType
-case class IdentifierType(name : String) extends FieldType
-case class BaseType(typ : BASE_TYPES.Value, typAnnotations :Option[List[TypeAnnotation]]) extends FieldType
-sealed trait ContainerType extends FieldType {
-    def typAnnotations :Option[List[TypeAnnotation]]
-}
-case class MapType(keyType : FieldType, valueType : FieldType,
-                   cppType : Option[CPPType],
-                   typAnnotations :Option[List[TypeAnnotation]]) extends ContainerType
-case class SetType(elemType : FieldType,
-                   cppType : Option[CPPType],
-                   typAnnotations :Option[List[TypeAnnotation]]) extends ContainerType
-case class ListType(elemType : FieldType,
-                    cppType : Option[CPPType],
-                    typAnnotations :Option[List[TypeAnnotation]]) extends ContainerType
-
-sealed trait ConstValue
-case class IntConstant(value : Int) extends ConstValue
-case class DoubleConstant(value : Double) extends ConstValue
-case class StringConstant(value : String) extends ConstValue
-case class IdConstant(value : String) extends ConstValue
-case class ConstantList(value : List[ConstValue]) extends ConstValue
-case class ConstantValuePair(first : ConstValue, second : ConstValue)
-case class ConstantMap(value : List[ConstantValuePair]) extends ConstValue
-
-case class ConstDef(fieldType : FieldType, id : String, value : ConstValue)
-
-case class TypeDef(name : String, fieldType : FieldType,
-                   typAnnotations :Option[List[TypeAnnotation]])
-case class EnumValueDef(value : String, id : Option[IntConstant], typAnnotations :Option[List[TypeAnnotation]])
-case class EnumDef(name : String, enumValues : List[EnumValueDef], typAnnotations :Option[List[TypeAnnotation]])
-
-case class SEnumDef(name : String, enumValues : List[String], typAnnotations :Option[List[TypeAnnotation]])
-
-case class FieldDef(id : Option[IntConstant], requiredNess : Boolean, fieldType : FieldType, name : String,
-                    fieldValue : Option[ConstValue], xsdOptional : Boolean, xsdNillable : Boolean,
-                    xsdAttributes: Option[XsdAttributes],
-                    typAnnotations :Option[List[TypeAnnotation]])
-
-case class XsdAttributes(fields : List[FieldDef])
-
-case class StructDef(name : String, xsdAll : Boolean, fields : List[FieldDef],
-                     typAnnotations :Option[List[TypeAnnotation]])
-
-case class UnionDef(val name : String, val xsdAll : Boolean,
-                    val fields : List[FieldDef],
-                    val typAnnotations :Option[List[TypeAnnotation]])
-
-case class ExceptionDef(val name : String,
-                        val fields : List[FieldDef],
-                        val typAnnotations :Option[List[TypeAnnotation]])
-
-case class FunctionDef(oneway : Boolean, returnType : FunctionType, name : String, parameters : List[FieldDef],
-                       throwFields : Option[List[FieldDef]], typAnnotations :Option[List[TypeAnnotation]])
-
-case class ServiceDef(name : String, superName : Option[String], functions : List[FunctionDef],
-                      typAnnotations :Option[List[TypeAnnotation]])
-
-case class IncludeDef(value : String)
-case class CppIncludeDef(val value : String)
-case class NamespaceDef(lang : THRIFT_LANG.Value, name : String, otherLang : Option[String] = None)
-
-case class ThriftDef(val includes : List[IncludeDef],
-                     val cppIncludes : List[CppIncludeDef],
-                     val namespaces : List[NamespaceDef],
-                     val constants : List[ConstDef],
-                     val typedefs : List[TypeDef],
-                     val enums : List[EnumDef],
-                     val senums : List[SEnumDef],
-                     val structs : List[StructDef],
-                     val unions : List[UnionDef],
-                     val xceptions : List[ExceptionDef],
-                     val services : List[ServiceDef]) {
-
-    def this() = this(List(), List(), List(), List(), List(), List(), List(),
-        List(), List(), List(), List())
-
-    def this(a : IncludeDef) = this(a :: Nil, List(), List(), List(), List(), List(), List(),
-        List(), List(), List(), List())
-    def this(a : CppIncludeDef) = this(List(), a :: Nil, List(), List(), List(), List(), List(), List(),
-        List(), List(), List())
-    def this(a : NamespaceDef) = this(List(), List(), a :: Nil, List(), List(), List(), List(), List(), List(),
-        List(), List())
-    def this(a : ConstDef) = this(List(), List(), List(), a :: Nil, List(), List(), List(), List(), List(), List(),
-        List())
-    def this(a : TypeDef) = this(List(), List(), List(), List(), a :: Nil, List(), List(), List(), List(), List(), List())
-    def this(a : EnumDef) = this(List(), List(), List(), List(), List(), a :: Nil, List(), List(),
-        List(), List(), List())
-    def this(a : SEnumDef) = this(List(), List(), List(), List(), List(), List(), a :: Nil, List(),
-        List(), List(), List())
-    def this(a : StructDef) = this(List(), List(), List(), List(), List(), List(), List(), a :: Nil,
-        List(), List(), List())
-    def this(a : UnionDef) = this(List(), List(), List(), List(), List(), List(), List(),
-        List(), a :: Nil, List(), List())
-    def this(a : ExceptionDef) = this(List(), List(), List(), List(), List(), List(), List(),
-        List(), List(), a :: Nil, List())
-    def this(a : ServiceDef) = this(List(), List(), List(), List(), List(), List(), List(),
-        List(), List(), List(), a :: Nil)
-
-
-    def plus(a : IncludeDef) = ThriftDef(includes.+:(a), cppIncludes, namespaces, constants, typedefs, enums, senums,
-        structs, unions, xceptions, services)
-    def plus(a : CppIncludeDef) = ThriftDef(includes, cppIncludes.+:(a), namespaces, constants, typedefs, enums, senums,
-        structs, unions, xceptions, services)
-    def plus(a : NamespaceDef) = ThriftDef(includes, cppIncludes, namespaces.+:(a), constants, typedefs, enums, senums,
-        structs, unions, xceptions, services)
-    def plus(a : ConstDef) = ThriftDef(includes, cppIncludes, namespaces, constants.+:(a), typedefs, enums, senums,
-        structs, unions, xceptions, services)
-    def plus(a : TypeDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs.+:(a), enums, senums,
-        structs, unions, xceptions, services)
-    def plus(a : EnumDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums.+:(a), senums,
-        structs, unions, xceptions, services)
-    def plus(a : SEnumDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums, senums.+:(a),
-        structs, unions, xceptions, services)
-    def plus(a : StructDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums, senums,
-        structs.+:(a), unions, xceptions, services)
-    def plus(a : UnionDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums, senums,
-        structs, unions.+:(a), xceptions, services)
-    def plus(a : ExceptionDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums, senums,
-        structs, unions, xceptions.+:(a), services)
-    def plus(a : ServiceDef) = ThriftDef(includes, cppIncludes, namespaces, constants, typedefs, enums, senums,
-        structs, unions, xceptions, services.+:(a))
-    def plus(a : ThriftDef) = ThriftDef(includes ::: a.includes,
-        cppIncludes ::: a.cppIncludes,
-        namespaces ::: a.namespaces,
-        constants ::: a.constants,
-        typedefs ::: a.typedefs,
-        enums ::: a.enums,
-        senums ::: a.senums,
-        structs ::: a.structs,
-        unions ::: a.unions,
-        xceptions ::: a.xceptions,
-        services ::: a.services)
-
-
-}
-
-trait ThriftKeywords {
-    this : StandardTokenParsers =>
-
-    import scala.language.implicitConversions
-
-    protected case class Keyword(str: String)
-
-    protected implicit def asParser(k: Keyword): Parser[String] = k.str
-
-    protected val LPAREN      = Keyword("(")
-    protected val RPAREN      = Keyword(")")
-    protected val EQ          = Keyword("=")
-    protected val CPP_TYPE    = Keyword("cpp_type")
-    protected val LIST        = Keyword("list")
-    protected val LT          = Keyword("<")
-    protected val GT          = Keyword(">")
-    protected val SET         = Keyword("set")
-    protected val MAP         = Keyword("map")
-    protected val STRING      = Keyword("string")
-    protected val BINARY      = Keyword("binary")
-    protected val SLIST       = Keyword("slist")
-    protected val BOOL        = Keyword("bool")
-    protected val BYTE        = Keyword("byte")
-    protected val I16         = Keyword("i16")
-    protected val I32         = Keyword("i32")
-    protected val I64         = Keyword("i64")
-    protected val DOUBLE      = Keyword("double")
-    protected val VOID        = Keyword("void")
-    protected val REQUIRED    = Keyword("required")
-    protected val OPTIONAL    = Keyword("optional")
-    protected val COLON       = Keyword(":")
-    protected val THROWS      = Keyword("throws")
-    protected val ONEWAY      = Keyword("oneway")
-    protected val EXTENDS     = Keyword("extends")
-    protected val SERVICE      = Keyword("service")
-    protected val EXCEPTION   = Keyword("exception")
-    protected val LBRACKET    = Keyword("{")
-    protected val RBRACKET    = Keyword("}")
-    protected val XSD_ATTRS   = Keyword("xsd_attributes")
-    protected val XSD_NILBLE  = Keyword("xsd_nillable")
-    protected val XSD_OPT     = Keyword("xsd_optional")
-    protected val XSD_ALL     = Keyword("xsd_all")
-    protected val UNION       = Keyword("union")
-    protected val LSQBRACKET  = Keyword("[")
-    protected val RSQBRACKET  = Keyword("]")
-    protected val CONST       = Keyword("const")
-    protected val STRUCT      = Keyword("struct")
-    protected val SENUM       = Keyword("senum")
-    protected val ENUM        = Keyword("enum")
-    protected val COMMA       = Keyword(",")
-    protected val SEMICOLON   = Keyword(";")
-    protected val TYPEDEF     = Keyword("typedef")
-    protected val INCLUDE     = Keyword("include")
-    protected val CPP_INCL    = Keyword("cpp_include")
-    protected val NAMESPACE   = Keyword("namespace")
-    protected val STAR        = Keyword("*")
-    protected val CPP_NS      = Keyword("cpp_namespace")
-    protected val PHP_NS      = Keyword("php_namespace")
-    protected val PY_NS       = Keyword("py_module")
-    protected val PERL_NS     = Keyword("perl_package")
-    protected val RUBY_NS     = Keyword("ruby_namespace")
-    protected val SMLTK_CAT   = Keyword("smalltalk_category")
-    protected val SMLTK_PRE   = Keyword("smalltalk_prefix")
-    protected val JAVA_NS     = Keyword("java_package")
-    protected val COCOA_NS    = Keyword("cocoa_package")
-    protected val XSD_NS      = Keyword("xsd_namespace")
-    protected val CSHARP_NS   = Keyword("csharp_namespace")
-
-    def isRequired(r : Option[String]) = r match {
-        case Some(REQUIRED) => true
-        case _ => false
-    }
-
-    def isXsdOptional(r : Option[String]) = r match {
-        case Some(XSD_OPT) => true
-        case _ => false
-    }
-
-    def isXsdNillable(r : Option[String]) = r match {
-        case Some(XSD_NILBLE) => true
-        case _ => false
-    }
-
-    def isXsdAll(r : Option[String]) = r match {
-        case Some(XSD_ALL) => true
-        case _ => false
-    }
-
-    def isOneWay(r : Option[String]) = r match {
-        case Some(ONEWAY) => true
-        case _ => false
-    }
-
-}
-
-trait ThriftTypeRules extends ThriftKeywords {
-    this : StandardTokenParsers  =>
-
-    def containterType : Parser[ContainerType] = mapType | setType | listType
-
-    def setType = SET ~ cppType.? ~ LT ~ fieldType ~ GT ~ typeAnnotations.? ^^ {
-        case s ~ ct ~ lt ~ t ~ gt ~ tA => SetType(t, ct, tA)
-    }
-
-    def listType = LIST ~ LT ~ fieldType ~ GT ~ cppType.? ~ typeAnnotations.? ^^ {
-        case l ~ lt ~ t ~ gt ~ ct ~ tA => ListType(t, ct, tA)
-    }
-
-    def mapType = MAP ~ cppType.? ~ LT ~ fieldType ~ COMMA ~ fieldType ~ GT ~ typeAnnotations.? ^^ {
-        case s ~ ct ~ lt ~ kt ~ c ~ vt ~ gt ~ tA => MapType(kt, vt, ct, tA)
-    }
-
-    def cppType : Parser[CPPType] = CPP_TYPE ~ stringLit ^^ { case c ~ s => CPPType(s)}
-
-    def fieldType: Parser[FieldType]  = ident ^^ {case i => IdentifierType(i)} |
-        baseType |
-        containterType
-
-    def baseType : Parser[BaseType] = simpleBaseType ~ typeAnnotations.? ^^ { case s ~ t => BaseType(s, t)}
-
-    def simpleBaseType : Parser[BASE_TYPES.Value] = STRING ^^^ BASE_TYPES.STRING |
-        BINARY ^^^ BASE_TYPES.BINARY |
-        SLIST ^^^ BASE_TYPES.SLIST |
-        BOOL ^^^ BASE_TYPES.BOOLEAN |
-        BYTE ^^^ BASE_TYPES.BYTE |
-        I16 ^^^ BASE_TYPES.I16 |
-        I32 ^^^ BASE_TYPES.I32 |
-        I64 ^^^ BASE_TYPES.I64 |
-        DOUBLE ^^^ BASE_TYPES.DOUBLE
-
-    def typeAnnotations : Parser[List[TypeAnnotation]] =
-        LPAREN ~ typeAnnotation.* ~ RPAREN ^^ { case l ~ t ~ r => t.toList}
-
-    def typeAnnotation : Parser[TypeAnnotation] =
-        (ident ~ EQ ~ stringLit ~ commaOrSemicolon.?) ^^ { case i ~ e ~ s ~ c  => TypeAnnotation(i,s)}
-
-    def commaOrSemicolon : Parser[String] = COMMA | SEMICOLON
-
-}
-
-/**
- * @todo extract Constant Rules into this Trait. This requires moving `hexConstant` here. But how to specify
- *       type of `HexConstant`, it is a Path dependent Type tied to lexical member of ThriftParser.
- */
-trait ThriftConstantRules extends ThriftKeywords {
-    this: StandardTokenParsers =>
-
-    //  def parseDouble(s: String) = try { Some(s.toDouble) } catch { case _ : Throwable => None }
-    //
-    //  def constValue : Parser[ConstValue] = numericLit ^^ {
-    //    case n => parseDouble(n) match {
-    //      case Some(d) => DoubleConstant(d)
-    //      case _ => IntConstant(n.toInt)
-    //    }
-    //  } |
-    //    hexConstant ^^ { case h => IntConstant(Integer.parseInt(h, 16))} |
-    //    stringLit ^^ { case s => StringConstant(s)} |
-    //    ident ^^ { case i => IdConstant(i)} |
-    //    constList |
-    //    constMap
-    //
-    //  def constValuePair = constValue ~ COLON ~ constValue ~ commaOrSemicolon.? ^^ {
-    //    case k ~ c ~ v ~ cs => ConstantValuePair(k,v)
-    //  }
-    //
-    //  def constList = LSQBRACKET ~ (constValue <~ commaOrSemicolon).* ~ RSQBRACKET ^^ {
-    //    case l ~ vs ~ r => ConstantList(vs)
-    //  }
-    //
-    //  def constMap = LBRACKET ~ constValuePair.* ~ RBRACKET ^^ {
-    //    case l ~ ps ~ r => ConstantMap(ps)
-    //  }
-}
-
-/**
- * A Parser for Thrift definition scripts.
- * Based on [[https://github.com/twitter/commons/blob/master/src/antlr/twitter/thrift/descriptors/AntlrThrift.g]].
- * Definition is parsed into a [[org.apache.hadoop.metadata.tools.thrift.ThriftDef ThriftDef]] structure.
- *
- *  @example {{{
- *  var p = new ThriftParser
- *  var td : Option[ThriftDef] = p("""include "share/fb303/if/fb303.thrift"
- *                namespace java org.apache.hadoop.hive.metastore.api
- *                namespace php metastore
- *                namespace cpp Apache.Hadoop.Hive
- *                \""")
- *  }}}
- *
- * @todo doesn't traverse includes directives. Includes are parsed into
- *       [[org.apache.hadoop.metadata.tools.thrift.IncludeDef IncludeDef]] structures
- *       but are not traversed.
- * @todo mixing in [[scala.util.parsing.combinator.PackratParsers PackratParsers]] is a placeholder. Need to
- *       change specific grammar rules to `lazy val` and `Parser[Elem]` to `PackratParser[Elem]`. Will do based on
- *       performance analysis.
- * @todo Error reporting
- */
-class ThriftParser extends StandardTokenParsers with ThriftKeywords with ThriftTypeRules with PackratParsers {
-
-    import scala.language.higherKinds
-
-    private val reservedWordsDelims : Seq[String] =
-        this
-            .getClass
-            .getMethods
-            .filter(_.getReturnType == classOf[Keyword])
-            .map(_.invoke(this).asInstanceOf[Keyword].str)
-
-    private val (thriftreservedWords : Seq[String], thriftdelims : Seq[String]) =
-        reservedWordsDelims.partition(s => s.charAt(0).isLetter)
-
-    override val lexical = new ThriftLexer(thriftreservedWords, thriftdelims)
-
-    import lexical.HexConstant
-    /** A parser which matches a hex constant */
-    def hexConstant: Parser[String] =
-        elem("string literal", _.isInstanceOf[HexConstant]) ^^ (_.chars)
-
-    def apply(input: String): Option[ThriftDef] = {
-        phrase(program)(new lexical.Scanner(input)) match {
-            case Success(r, x) => Some(r)
-            case Failure(m, x) => {
-                None
-            }
-            case Error(m, x) => {
-                None
-            }
-        }
-    }
-
-    def program = headers ~ definitions ^^ { case h ~ d => h plus d}
-
-    def headers = header.*  ^^ { case l => l.foldRight(new ThriftDef)((a,t) => t plus a)}
-
-    def header = INCLUDE ~> stringLit ^^ { case s => new ThriftDef(IncludeDef(s))} |
-        CPP_INCL ~> stringLit ^^ { case s => new ThriftDef(CppIncludeDef(s))} |
-        NAMESPACE ~ ident ~ ident ^^ { case ns ~ t ~ n => new ThriftDef(NamespaceDef(THRIFT_LANG.OTHER, n, Some(t)))} |
-        NAMESPACE ~ STAR ~ ident ^^ { case ns ~ s ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.STAR, i))} |
-        CPP_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.CPP, i))} |
-        PHP_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.PHP, i))} |
-        PY_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.PY, i))} |
-        PERL_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.PERL, i))} |
-        RUBY_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.RUBY, i))} |
-        SMLTK_CAT ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.SMLTK_CAT, i))} |
-        SMLTK_PRE ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.SMLTK_PRE, i))} |
-        JAVA_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.JAVA, i))} |
-        COCOA_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.COCOA, i))} |
-        XSD_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.XSD, i))} |
-        CSHARP_NS ~ ident ^^ { case ns ~ i => new ThriftDef(NamespaceDef(THRIFT_LANG.CSHARP, i))}
-
-    def definitions : Parser[ThriftDef] = definition.*  ^^ {
-        case l => l.foldRight(new ThriftDef)((a,t) => t plus a)
-    }
-
-    def definition : Parser[ThriftDef] = const ^^ { case c => new ThriftDef(c)} |
-        typeDefinition |
-        service ^^ { case s => new ThriftDef(s)}
-
-
-    def typeDefinition : Parser[ThriftDef] = (typedef ^^ {case t => new ThriftDef(t)} |
-        enum ^^ {case e => new ThriftDef(e)} |
-        senum ^^ {case e => new ThriftDef(e)} |
-        struct ^^ {case e => new ThriftDef(e)} |
-        union ^^ {case e => new ThriftDef(e)} |
-        xception ^^ {case e => new ThriftDef(e)}
-        )
-
-    def typedef : Parser[TypeDef] = TYPEDEF ~ fieldType ~ ident ~ typeAnnotations.? ^^ {
-        case t ~ f ~ i ~ tA => TypeDef(i, f, tA)
-    }
-
-    def enum : Parser[EnumDef] = ENUM ~ ident ~ LBRACKET ~ enumDef.* ~ RBRACKET ~ typeAnnotations.? ^^ {
-        case e ~ i ~  l ~ ed ~ r ~ t => EnumDef(i, ed.toList, t)
-    }
-
-    def enumDef : Parser[EnumValueDef] = ident ~ EQ ~ numericLit ~ typeAnnotations.? ~ commaOrSemicolon.? ^^ {
-        case i ~ e ~ n ~ t ~ c => EnumValueDef(i, Some(IntConstant(n.toInt)), t)
-    }
-
-    def senum : Parser[SEnumDef] = SENUM ~ ident ~ LBRACKET ~ senumDef.* ~ RBRACKET ~ typeAnnotations.? ^^ {
-        case se ~ i ~  l ~ sed ~ r ~ t => SEnumDef(i, sed.toList, t)
-    }
-
-    def senumDef : Parser[String] = stringLit <~ commaOrSemicolon.?
-
-    def service : Parser[ServiceDef] = SERVICE ~ ident ~ extnds.? ~ LBRACKET ~ function.* ~
-        RBRACKET ~ typeAnnotations.? ^^ {
-        case s ~ i ~ e ~ lb ~ fs ~ rb ~ tA => ServiceDef(i, e, fs, tA)
-    }
-
-    def extnds : Parser[String] = EXTENDS ~> ident
-
-    def function : Parser[FunctionDef] = ONEWAY.? ~ functionType ~ ident ~ LPAREN ~ field.* ~ RPAREN ~ throwz.? ~
-        typeAnnotations.? ~ commaOrSemicolon.? ^^ {
-        case o ~ fT ~ i ~ lp ~ fs ~ rp ~ th ~ tA ~ cS => FunctionDef(isOneWay(o), fT, i, fs, th, tA)
-    }
-
-    def throwz : Parser[List[FieldDef]] = THROWS ~ LPAREN ~ field.* ~ RPAREN ^^ {
-        case t ~ l ~ fs ~ r => fs.toList
-    }
-
-    def functionType : Parser[FunctionType] = VOID ^^^ VoidType() | fieldType
-
-    def xception : Parser[ExceptionDef] = EXCEPTION ~ ident ~ LBRACKET ~ field.* ~ RBRACKET ~ typeAnnotations.? ^^ {
-        case s ~ i ~ lb ~ fs ~ rb ~ tA => ExceptionDef(i, fs.toList, tA)
-    }
-
-    def union : Parser[UnionDef] = UNION ~ ident ~ XSD_ALL.? ~ LBRACKET ~ field.* ~ RBRACKET ~ typeAnnotations.? ^^ {
-        case s ~ i ~ xA ~ lb ~ fs ~ rb ~ tA => UnionDef(i, isXsdAll(xA), fs.toList, tA)
-    }
-
-    def struct : Parser[StructDef] = STRUCT ~ ident ~ XSD_ALL.? ~ LBRACKET ~ field.* ~ RBRACKET ~ typeAnnotations.? ^^ {
-        case s ~ i ~ xA ~ lb ~ fs ~ rb ~ tA => StructDef(i, isXsdAll(xA), fs.toList, tA)
-    }
-
-    def field : Parser[FieldDef] = fieldIdentifier.? ~ fieldRequiredness.? ~ fieldType ~ ident ~ fieldValue.? ~
-        XSD_OPT.? ~ XSD_NILBLE.? ~ xsdAttributes.? ~ typeAnnotations.? ~ commaOrSemicolon.? ^^ {
-        case fi ~ fr ~ ft ~id ~ fv ~ xo ~ xn ~ xa ~ tA ~ cS => FieldDef(
-            fi,
-            isRequired(fr),
-            ft,
-            id,
-            fv,
-            isXsdOptional(xo),
-            isXsdNillable(xn),
-            xa,
-            tA
-        )
-    }
-
-    def xsdAttributes : Parser[XsdAttributes] = XSD_ATTRS ~ LBRACKET ~ field.* ~ RBRACKET ^^ {
-        case x ~ l ~ f ~ r => XsdAttributes(f)
-    }
-
-    def fieldValue = EQ ~> constValue
-
-    def fieldRequiredness : Parser[String] = REQUIRED | OPTIONAL
-
-    def fieldIdentifier : Parser[IntConstant] = numericLit <~ COLON ^^ {
-        case n => IntConstant(n.toInt)
-    }
-
-    def const : Parser[ConstDef] = CONST ~ fieldType ~ ident ~ EQ ~ constValue ~ commaOrSemicolon.? ^^ {
-        case c ~ fT ~ i ~ e ~ cV ~ cS => ConstDef(fT, i, cV)
-    }
-
-    def parseDouble(s: String) = try { Some(s.toDouble) } catch { case _ : Throwable => None }
-
-    def constValue : Parser[ConstValue] = numericLit ^^ {
-        case n => parseDouble(n) match {
-            case Some(d) => DoubleConstant(d)
-            case _ => IntConstant(n.toInt)
-        }
-    } |
-        hexConstant ^^ { case h => IntConstant(Integer.parseInt(h, 16))} |
-        stringLit ^^ { case s => StringConstant(s)} |
-        ident ^^ { case i => IdConstant(i)} |
-        constList |
-        constMap
-
-    def constValuePair = constValue ~ COLON ~ constValue ~ commaOrSemicolon.? ^^ {
-        case k ~ c ~ v ~ cs => ConstantValuePair(k,v)
-    }
-
-    def constList = LSQBRACKET ~ (constValue <~ commaOrSemicolon).* ~ RSQBRACKET ^^ {
-        case l ~ vs ~ r => ConstantList(vs)
-    }
-
-    def constMap = LBRACKET ~ constValuePair.* ~ RBRACKET ^^ {
-        case l ~ ps ~ r => ConstantMap(ps)
-    }
-}
-
-class ThriftLexer(val keywords: Seq[String], val delims : Seq[String]) extends StdLexical with ImplicitConversions {
-
-    case class HexConstant(chars: String) extends Token {
-        override def toString = chars
-    }
-
-    case class StIdentifier(chars: String) extends Token {
-        override def toString = chars
-    }
-
-    reserved ++= keywords
-
-    delimiters ++= delims
-
-    override lazy val token: Parser[Token] =
-        ( intConstant ^^ NumericLit
-            | hexConstant ^^ HexConstant
-            | dubConstant ^^ NumericLit
-            | identifier ^^ processIdent
-            | st_identifier ^^ StIdentifier
-            | string ^^ StringLit
-            | EofCh ^^^ EOF
-            | '\'' ~> failure("unclosed string literal")
-            | '"' ~> failure("unclosed string literal")
-            | delim
-            | failure("illegal character")
-            )
-
-    override def identChar = letter | elem('_')
-
-    def identifier = identChar ~ (identChar | digit | '.' ).* ^^
-        { case first ~ rest => (first :: rest).mkString }
-
-    def st_identChar = letter | elem('-')
-    def st_identifier = st_identChar ~ (st_identChar | digit | '.' | '_').* ^^
-        { case first ~ rest => (first :: rest).mkString("")}
-
-    override def whitespace: Parser[Any] =
-        ( whitespaceChar
-            | '/' ~ '*' ~ comment
-            | '/' ~ '/' ~ chrExcept(EofCh, '\n').*
-            | '#' ~ chrExcept(EofCh, '\n').*
-            | '/' ~ '*' ~ failure("unclosed comment")
-            ).*
-
-    protected override def comment: Parser[Any] = (
-        commentChar.* ~ '*' ~ '/'
-        )
-
-    protected def commentChar = chrExcept(EofCh, '*') | '*' ~ not('/')
-
-    def string = '\"' ~> chrExcept('\"', '\n', EofCh).* <~ '\"' ^^ { _ mkString "" } |
-        '\'' ~> chrExcept('\'', '\n', EofCh).* <~ '\'' ^^ { _ mkString "" }
-
-    def zero: Parser[String] = '0' ^^^ "0"
-    def nonzero = elem("nonzero digit", d => d.isDigit && d != '0')
-    def sign = elem("sign character", d => d == '-' || d == '+')
-    def exponent = elem("exponent character", d => d == 'e' || d == 'E')
-
-
-    def intConstant = opt(sign) ~> zero | intList
-    def intList = opt(sign) ~ nonzero ~ rep(digit) ^^ {case s ~ x ~ y =>  (optString("", s) :: x :: y) mkString ""}
-    def fracPart = '.' ~> rep(digit) ^^ { "." + _ mkString "" }
-    def expPart = exponent ~ opt(sign) ~ rep1(digit) ^^ { case e ~ s ~ d =>
-        e.toString + optString("", s) + d.mkString("")
-    }
-
-    def dubConstant = opt(sign) ~ digit.* ~ fracPart ~ opt(expPart) ^^ { case s ~ i ~ f ~ e =>
-        optString("", s) + i + f + optString("", e)
-    }
-
-    val hexDigits = Set[Char]() ++ "0123456789abcdefABCDEF".toArray
-    def hexDigit = elem("hex digit", hexDigits.contains(_))
-
-    def hexConstant = '0' ~> 'x' ~> hexDigit.+ ^^ {case h => h.mkString("")}
-
-
-    private def optString[A](pre: String, a: Option[A]) = a match {
-        case Some(x) => pre + x.toString
-        case None => ""
-    }
-}
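
For reference, a minimal sketch of how a parser built on scala-parser-combinators, like the one removed above, is typically driven. It assumes the refactored org.apache.atlas.tools.thrift package introduced by this commit is on the classpath; the object name and the inline Thrift snippet are illustrative only, not part of the commit.

import org.apache.atlas.tools.thrift.ThriftParser

object ThriftParserSketch {
    def main(args: Array[String]): Unit = {
        // A tiny Thrift IDL fragment; any valid struct definition would do.
        val thriftSrc =
            """struct Person {
              |  1: string name,
              |  2: string email,
              |}""".stripMargin

        val parser = new ThriftParser
        // apply(...) returns Option[ThriftDef]: Some on success, None when parsing fails.
        parser(thriftSrc) match {
            case Some(thriftDef) => println(s"parsed ${thriftDef.structs.size} struct(s)")
            case None            => println("failed to parse thrift input")
        }
    }
}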

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftTypesGen.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftTypesGen.scala b/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftTypesGen.scala
deleted file mode 100755
index e04e192..0000000
--- a/tools/src/main/scala/org/apache/hadoop/metadata/tools/thrift/ThriftTypesGen.scala
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.tools.thrift
-
-import com.google.common.collect.ImmutableList
-import org.apache.hadoop.metadata.typesystem.TypesDef
-import org.apache.hadoop.metadata.typesystem.types._
-import org.apache.hadoop.metadata.MetadataException
-import org.slf4j.{Logger, LoggerFactory}
-
-import scala.io.Source
-import scala.util.{Failure, Success, Try}
-
-
-case class CompositeRelation(typeName: String, fieldName: String, reverseFieldName: Option[String])
-
-/**
- * Convert a [[org.apache.hadoop.metadata.tools.thrift.ThriftDef ThriftDef]] to
- * [[TypesDef TypesDef]]. Currently there are several restrictions:
- *
- * - CppIncludes, SEnums are not allowed
- * - The only include allowed is that of "share/fb303/if/fb303.thrift". This include is ignored.
- * Any other include will trigger an exception
- * - Namespaces, TypeDefs, Constants, Unions, Exceptions, and Service definitions are ignored.
- * - So typeDefs are not applied to fields.
- * - Field Constant values are ignored.
- * - Type Annotations and XSD information are ignored.
- *
- * Thrift Structs can be mapped to Structs, Traits, or Classes. The caller can specify their preference by
- * providing the structNames, classNames and traitNames parameters. A Struct that is not in one of these 3
- * lists is not mapped.
- *
- * The ThriftDef doesn't specify whether a relationship is composite. For example, in the Thrift definition
- * {{{
- *     struct Person {
-        1: string       name,
-        2: Address   addr,
-      }
-      struct Address {
-        1: string       street,
-        2: string   city,
-      }
- * }}}
- *
- * If Person and Address are mapped to classes, you may want to make Person -> Address a composite relation.
- * The caller can specify these in the 'compositeRelations' parameter.
- *
- */
-class ThriftTypesGen(val structNames: List[String], val classNames: List[String], val traitNames: List[String],
-                     val compositeRelations: List[CompositeRelation]) {
-
-    private val LOG: Logger = LoggerFactory.getLogger(classOf[ThriftTypesGen])
-    private val FB_INCLUDE = "share/fb303/if/fb303.thrift"
-    /**
-     * For a (typeName, fieldName), specifies (isComposite, reverseName).
-     * If an entry doesn't exist then the field is not composite.
-     */
-    private var compositeRelsMap: Map[(String, String), (Boolean, Option[String])] = Map()
-
-    def apply(thriftResource: String): TypesDef = {
-        val tDef = parseThrift(thriftResource)
-
-        tDef.flatMap(buildCompositeRelations).flatMap(typesDef) match {
-            case Success(t) => t
-            case Failure(v) => throw v
-        }
-
-    }
-
-    def buildCompositeRelations(thriftDef: ThriftDef): Try[ThriftDef] = Try {
-
-        compositeRelations.foreach { cr =>
-
-            val sDef = thriftDef.structs.find(_.name == cr.typeName)
-            if (!sDef.isDefined) {
-                throw new MetadataException(s"Unknown Struct (${cr.typeName}) specified in CompositeRelation")
-
-            }
-            val fDef = sDef.get.fields.find(_.name == cr.fieldName)
-            if (!fDef.isDefined) {
-                throw new MetadataException(s"Unknown Field (${cr.fieldName}) specified in CompositeRelation")
-
-            }
-
-            compositeRelsMap = compositeRelsMap + ((cr.typeName, cr.fieldName) ->(true, cr.reverseFieldName))
-
-            if (cr.reverseFieldName.isDefined) {
-                val reverseStructName = dataTypeName(fDef.get.fieldType)
-                val reverseStructDef = thriftDef.structs.find(_.name == reverseStructName)
-                if (!reverseStructDef.isDefined) {
-                    throw new MetadataException(s"Cannot find Struct $reverseStructName in CompositeRelation $cr")
-                }
-                val rfDef = reverseStructDef.get.fields.find(_.name == cr.reverseFieldName)
-                if (!rfDef.isDefined) {
-                    throw new MetadataException(s"Unknown Reverse Field (${cr.reverseFieldName}) specified in CompositeRelation")
-                }
-
-                List(cr, CompositeRelation(reverseStructName, cr.reverseFieldName.get, Some(cr.fieldName)))
-
-                compositeRelsMap = compositeRelsMap +
-                    ((reverseStructName, cr.reverseFieldName.get) ->(false, Some(cr.fieldName)))
-            }
-        }
-
-        thriftDef
-    }
-
-    def typesDef(thriftDef: ThriftDef): Try[TypesDef] = {
-        var tDef: Try[TypesDef] = Try {
-            TypesDef(Seq(), Seq(), Seq(), Seq())
-        }
-
-        tDef.flatMap((t: TypesDef) => includes(t, thriftDef.includes)).flatMap((t: TypesDef) => cppIncludes(t, thriftDef.cppIncludes))
-
-        tDef = tDef.flatMap((t: TypesDef) => includes(t, thriftDef.includes)).
-            flatMap((t: TypesDef) => cppIncludes(t, thriftDef.cppIncludes)).
-            flatMap((t: TypesDef) => namespaces(t, thriftDef.namespaces)).
-            flatMap((t: TypesDef) => constants(t, thriftDef.constants)).
-            flatMap((t: TypesDef) => senums(t, thriftDef.senums)).
-            flatMap((t: TypesDef) => enums(t, thriftDef.enums)).
-            flatMap((t: TypesDef) => structs(t, thriftDef.structs)).
-            flatMap((t: TypesDef) => unions(t, thriftDef.unions)).
-            flatMap((t: TypesDef) => exceptions(t, thriftDef.xceptions)).
-            flatMap((t: TypesDef) => services(t, thriftDef.services))
-
-
-        tDef
-    }
-
-    private def parseThrift(thriftResource: String): Try[ThriftDef] = {
-        Try {
-            LOG.debug("Parsing Thrift resource {}", thriftResource)
-            val is = getClass().getResourceAsStream(thriftResource)
-            val src: Source = Source.fromInputStream(is)
-            val thriftStr: String = src.getLines().mkString("\n")
-            val p = new ThriftParser
-            var thriftDef: Option[ThriftDef] = p(thriftStr)
-            thriftDef match {
-                case Some(s) => s
-                case None => {
-                    LOG.debug("Parse for thrift resource {} failed", thriftResource)
-                    throw new MetadataException(s"Failed to parse thrift resource: $thriftResource")
-                }
-            }
-        }
-    }
-
-    @throws[MetadataException]
-    private def dataTypeName(fT: FieldType): String = fT match {
-        case IdentifierType(n) => n
-        case BaseType(typ, _) => BASE_TYPES.toPrimitiveTypeName(typ)
-        case ListType(elemType, _, _) => DataTypes.arrayTypeName(dataTypeName(elemType))
-        case SetType(elemType, _, _) => DataTypes.arrayTypeName(dataTypeName(elemType))
-        case MapType(keyType, valueType, _, _) => DataTypes.mapTypeName(dataTypeName(keyType), dataTypeName(valueType))
-    }
-
-    private def enumValue(e: EnumValueDef, defId: Int): EnumValue = e match {
-        case EnumValueDef(value, Some(id), _) => new EnumValue(value, id.value)
-        case EnumValueDef(value, None, _) => new EnumValue(value, defId)
-    }
-
-    private def enumDef(td: TypesDef, e: EnumDef): Try[TypesDef] = {
-        Success(
-            td.copy(enumTypes = td.enumTypes :+
-                new EnumTypeDefinition(e.name, e.enumValues.zipWithIndex.map(t => enumValue(t._1, -t._2)): _*))
-        )
-    }
-
-    private def includeDef(td: TypesDef, i: IncludeDef): Try[TypesDef] = {
-        Try {
-            if (i.value != FB_INCLUDE) {
-                throw new MetadataException(s"Unsupported Include ${i.value}, only fb303.thrift is currently allowed.")
-            }
-            td
-        }
-    }
-
-    private def cppIncludeDef(td: TypesDef, i: CppIncludeDef): Try[TypesDef] = {
-        Try {
-            throw new MetadataException(s"Unsupported CppInclude ${i.value}.")
-        }
-    }
-
-    private def namespaceDef(td: TypesDef, i: NamespaceDef): Try[TypesDef] = {
-        Try {
-            LOG.debug(s"Ignoring Namespace definition $i")
-            td
-        }
-    }
-
-    private def constantDef(td: TypesDef, i: ConstDef): Try[TypesDef] = {
-        Try {
-            LOG.debug(s"Ignoring ConstantDef definition $i")
-            td
-        }
-    }
-
-    private def senumDef(td: TypesDef, i: SEnumDef): Try[TypesDef] = {
-        Try {
-            throw new MetadataException(s"Unsupported SEnums ${i}.")
-        }
-    }
-
-    private def fieldDef(typName: String, fd: FieldDef): AttributeDefinition = {
-        val name: String = fd.name
-        val dTName: String = dataTypeName(fd.fieldType)
-
-        var m: Multiplicity = Multiplicity.OPTIONAL
-
-        if (fd.requiredNess) {
-            m = Multiplicity.REQUIRED
-        }
-
-        fd.fieldType match {
-            case _: ListType => m = Multiplicity.COLLECTION
-            case _: SetType => m = Multiplicity.SET
-            case _ => ()
-        }
-
-        var isComposite = false
-        var reverseAttrName: String = null
-
-        val r = compositeRelsMap.get((typName, name))
-        if (r.isDefined) {
-            isComposite = r.get._1
-            if (r.get._2.isDefined) {
-                reverseAttrName = r.get._2.get
-            }
-        }
-        new AttributeDefinition(name, dTName, m, isComposite, reverseAttrName)
-    }
-
-    private def structDef(td: TypesDef, structDef: StructDef): Try[TypesDef] = Try {
-        val typeName: String = structDef.name
-
-        typeName match {
-            case t if structNames contains t => td.copy(structTypes = td.structTypes :+
-                new StructTypeDefinition(typeName, structDef.fields.map(fieldDef(typeName, _)).toArray))
-            case t: String if traitNames contains t => {
-                val ts = td.traitTypes :+
-                    new HierarchicalTypeDefinition[TraitType](classOf[TraitType],
-                        typeName, ImmutableList.of[String](), structDef.fields.map(fieldDef(typeName, _)).toArray)
-                td.copy(traitTypes = ts)
-            }
-            case t: String if classNames contains t => {
-                val cs = td.classTypes :+
-                    new HierarchicalTypeDefinition[ClassType](classOf[ClassType],
-                        typeName, ImmutableList.of[String](), structDef.fields.map(fieldDef(typeName, _)).toArray)
-                td.copy(classTypes = cs)
-            }
-            case _ => td
-        }
-    }
-
-    private def unionDef(td: TypesDef, i: UnionDef): Try[TypesDef] = {
-        Try {
-            LOG.debug(s"Ignoring Union definition $i")
-            td
-        }
-    }
-
-    private def exceptionDef(td: TypesDef, i: ExceptionDef): Try[TypesDef] = {
-        Try {
-            LOG.debug(s"Ignoring Exception definition $i")
-            td
-        }
-    }
-
-    private def serviceDef(td: TypesDef, i: ServiceDef): Try[TypesDef] = {
-        Try {
-            LOG.debug(s"Ignoring Service definition $i")
-            td
-        }
-    }
-
-    private def applyList[T](fn: (TypesDef, T) => Try[TypesDef])(td: TypesDef, l: List[T]): Try[TypesDef] = {
-        l.foldLeft[Try[TypesDef]](Success(td))((b, a) => b.flatMap({ Unit => fn(td, a)}))
-    }
-
-    private def includes = applyList(includeDef) _
-
-    private def cppIncludes = applyList(cppIncludeDef) _
-
-    private def namespaces = applyList(namespaceDef) _
-
-    private def constants = applyList(constantDef) _
-
-    private def enums = applyList(enumDef) _
-
-    private def senums = applyList(senumDef) _
-
-    private def structs = applyList(structDef) _
-
-    private def unions = applyList(unionDef) _
-
-    private def exceptions = applyList(exceptionDef) _
-
-    private def services = applyList(serviceDef) _
-
-}
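
For context, a minimal sketch of how the generator above is typically constructed and invoked. It assumes the refactored org.apache.atlas.tools.thrift package introduced by this commit, and a hypothetical /sample.thrift classpath resource defining Person and Address structs as in the scaladoc example; the names chosen below are illustrative only.

import org.apache.atlas.tools.thrift.{CompositeRelation, ThriftTypesGen}

object ThriftTypesGenSketch {
    def main(args: Array[String]): Unit = {
        val gen = new ThriftTypesGen(
            structNames = List("Address"),     // emitted as Atlas StructTypes
            classNames  = List("Person"),      // emitted as Atlas ClassTypes
            traitNames  = List(),              // nothing emitted as TraitTypes here
            compositeRelations = List(
                // Person.addr owns its Address; no reverse field in this sketch
                CompositeRelation("Person", "addr", None)))

        // apply(thriftResource) parses the resource and converts it to a TypesDef,
        // rethrowing the underlying exception if parsing or conversion fails.
        val typesDef = gen("/sample.thrift")
        println(typesDef)
    }
}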

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/tools/src/test/scala/org/apache/atlas/tools/dsl/DSLTest.scala
----------------------------------------------------------------------
diff --git a/tools/src/test/scala/org/apache/atlas/tools/dsl/DSLTest.scala b/tools/src/test/scala/org/apache/atlas/tools/dsl/DSLTest.scala
new file mode 100755
index 0000000..2691600
--- /dev/null
+++ b/tools/src/test/scala/org/apache/atlas/tools/dsl/DSLTest.scala
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.tools.dsl
+
+import org.apache.atlas.tools.hive.HiveMockMetadataService
+import org.apache.atlas.typesystem.types.utils.TypesUtil
+import org.apache.atlas.typesystem.types.{DataTypes, StructType, TypeSystem}
+import org.json4s.native.JsonMethods._
+import org.junit.{Assert, Before, Test}
+
+/**
+ * DSL Test.
+ */
+class DSLTest {
+    val STRUCT_TYPE_1: String = "t1"
+    val STRUCT_TYPE_2: String = "t2"
+
+
+    @Before
+    def setup {
+        val ts: TypeSystem = TypeSystem.getInstance
+        ts.reset()
+
+        val structType: StructType = ts.defineStructType(
+            STRUCT_TYPE_1, true,
+            TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
+            TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
+            TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
+            TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
+            TypesUtil.createOptionalAttrDef("e", DataTypes.INT_TYPE),
+            TypesUtil.createOptionalAttrDef("f", DataTypes.INT_TYPE),
+            TypesUtil.createOptionalAttrDef("g", DataTypes.LONG_TYPE),
+            TypesUtil.createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
+            TypesUtil.createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
+            TypesUtil.createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
+            TypesUtil.createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
+            TypesUtil.createOptionalAttrDef("l", DataTypes.DATE_TYPE),
+            TypesUtil.createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
+            TypesUtil.createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
+            TypesUtil.createOptionalAttrDef("o",
+                ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)))
+
+        val recursiveStructType: StructType = ts.defineStructType(
+            STRUCT_TYPE_2, true,
+            TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE),
+            TypesUtil.createOptionalAttrDef("s", STRUCT_TYPE_2))
+    }
+
+    @Test def test1 {
+
+        // 1. Existing Types in System
+        //Assert.assertEquals(s"${listTypes}", "[t2, t1, int, array<bigdecimal>, long, double, date, float, short, biginteger, byte, string, boolean, bigdecimal, map<string,double>, array<int>]")
+
+        defineStructType("mytype",
+            attrDef("a", INT_TYPE, ATTR_REQUIRED),
+            attrDef("b", BOOLEAN_TYPE),
+            attrDef("c", BYTE_TYPE),
+            attrDef("d", SHORT_TYPE),
+            attrDef("e", INT_TYPE),
+            attrDef("f", INT_TYPE),
+            attrDef("g", LONG_TYPE),
+            attrDef("h", FLOAT_TYPE),
+            attrDef("i", DOUBLE_TYPE),
+            attrDef("j", BIGINT_TYPE),
+            attrDef("k", BIGDECIMAL_TYPE),
+            attrDef("l", DATE_TYPE),
+            attrDef("m", arrayType(INT_TYPE)),
+            attrDef("n", arrayType(BIGDECIMAL_TYPE)),
+            attrDef("o", mapType(STRING_TYPE, DOUBLE_TYPE)))
+
+        // 2. 'mytype' available as a Type
+        Assert.assertEquals(s"${listTypes}", "[array<bigdecimal>,array<int>,map<string,double>,mytype,t1,t2]")
+
+        // 3. Create a 'mytype' instance from Json
+        val i = createInstance("mytype", """
+        {
+                               "$typeName$":"mytype",
+                               "e":1,
+                               "n":[1,1.1],
+                               "h":1.0,
+                               "b":true,
+                               "k":1,
+                               "j":1,
+                               "d":2,
+                               "m":[1,1],
+                               "g":1,
+                               "a":1,
+                               "i":1.0,
+                               "c":1,
+                               "l":"2014-12-03T08:00:00.000Z",
+                               "f":1,
+                               "o":{
+                                 "b":2.0,
+                                 "a":1.0
+                               }
+                             }
+                                         """)
+
+        // 4. Navigate mytype instance in code
+        // Examples of navigating the mytype instance in code
+        Assert.assertEquals(s"${i.a}", "1")
+        Assert.assertEquals(s"${i.o}", "{b=2.0, a=1.0}")
+        Assert.assertEquals(s"${i.o.asInstanceOf[java.util.Map[_, _]].keySet}", "[b, a]")
+
+        // 5. Serialize mytype instance to Json
+        Assert.assertEquals(s"${pretty(render(i))}", "{\n  \"$typeName$\":\"mytype\",\n  \"e\":1," + "\n  \"n\":[1,1.100000000000000088817841970012523233890533447265625],\n  \"h\":1.0,\n  \"b\":true,\n  \"k\":1,\n  \"j\":1,\n  \"d\":2,\n  \"m\":[1,1],\n  \"g\":1,\n  \"a\":1,\n  \"i\":1.0,\n  \"c\":1,\n  \"l\":\"2014-12-03T08:00:00.000Z\",\n  \"f\":1,\n  \"o\":{\n    \"b\":2.0,\n    \"a\":1.0\n  }\n}")
+    }
+
+    @Test def test2 {
+
+        // 1. Existing Types in System
+        Assert.assertEquals(s"${listTypes}", "[array<bigdecimal>,array<int>,map<string,double>,t1,t2]")
+
+        val addrType = defineStructType("addressType",
+            attrDef("houseNum", INT_TYPE, ATTR_REQUIRED),
+            attrDef("street", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("city", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("state", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("zip", INT_TYPE, ATTR_REQUIRED),
+            attrDef("country", STRING_TYPE, ATTR_REQUIRED)
+        )
+
+        val personType = defineStructType("personType",
+            attrDef("first_name", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("last_name", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("address", addrType)
+        )
+
+        // 2. updated Types in System
+        Assert.assertEquals(s"${listTypes}", "[addressType,array<bigdecimal>,array<int>,map<string,double>,personType,t1,t2]")
+
+
+        // 3. Construct a Person in Code
+        val person = createInstance("personType")
+        val address = createInstance("addressType")
+
+        person.first_name = "Meta"
+        person.last_name = "Hadoop"
+
+        address.houseNum = 3460
+        address.street = "W Bayshore Rd"
+        address.city = "Palo Alto"
+        address.state = "CA"
+        address.zip = 94303
+        address.country = "USA"
+
+        person.address = address
+
+        // 4. Convert to Json
+        Assert.assertEquals(s"${pretty(render(person))}", "{\n  \"$typeName$\":\"personType\",\n  \"first_name\":\"Meta\",\n  \"address\":{\n    \"$typeName$\":\"addressType\",\n    \"houseNum\":3460,\n    \"city\":\"Palo Alto\",\n    \"country\":\"USA\",\n    \"state\":\"CA\",\n    \"zip\":94303,\n    \"street\":\"W Bayshore Rd\"\n  },\n  \"last_name\":\"Hadoop\"\n}");
+
+        val p2 = createInstance("personType", """{
+                                              "first_name":"Meta",
+                                              "address":{
+                                                "houseNum":3460,
+                                                "city":"Palo Alto",
+                                                "country":"USA",
+                                                "state":"CA",
+                                                "zip":94303,
+                                                "street":"W Bayshore Rd"
+                                              },
+                                              "last_name":"Hadoop"
+                                            }""")
+
+    }
+
+    @Test def testHive(): Unit = {
+        val hiveTable = HiveMockMetadataService.getTable("tpcds", "date_dim")
+        //println(hiveTable)
+
+        //name : String, typeName : String, comment : String
+        val fieldType = defineStructType("FieldSchema",
+            attrDef("name", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("typeName", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("comment", STRING_TYPE)
+        )
+        /*
+        SerDe(name : String, serializationLib : String, parameters : Map[String, String])
+         */
+        defineStructType("SerDe",
+            attrDef("name", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("serializationLib", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE))
+        )
+
+        /*
+        StorageDescriptor(fields : List[FieldSchema],
+                                   location : String, inputFormat : String,
+                                    outputFormat : String, compressed : Boolean,
+                                    numBuckets : Int, bucketColumns : List[String],
+                                    sortColumns : List[String],
+                                    parameters : Map[String, String],
+                                    storedAsSubDirs : Boolean
+                                    )
+         */
+        val sdType = defineStructType("StorageDescriptor",
+            attrDef("location", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("inputFormat", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("outputFormat", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("compressed", BOOLEAN_TYPE),
+            attrDef("numBuckets", INT_TYPE),
+            attrDef("bucketColumns", arrayType(STRING_TYPE)),
+            attrDef("sortColumns", arrayType(STRING_TYPE)),
+            attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
+            attrDef("storedAsSubDirs", BOOLEAN_TYPE)
+        )
+
+        /*
+        case class Table(dbName : String, tableName : String, storageDesc : StorageDescriptor,
+                       parameters : Map[String, String],
+                        tableType : String)
+         */
+        defineStructType("Table",
+            attrDef("dbName", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("tableName", STRING_TYPE, ATTR_REQUIRED),
+            attrDef("storageDesc", sdType, ATTR_REQUIRED),
+            attrDef("compressed", BOOLEAN_TYPE),
+            attrDef("numBuckets", INT_TYPE),
+            attrDef("bucketColumns", arrayType(STRING_TYPE)),
+            attrDef("sortColumns", arrayType(STRING_TYPE)),
+            attrDef("parameters", mapType(STRING_TYPE, STRING_TYPE)),
+            attrDef("storedAsSubDirs", BOOLEAN_TYPE)
+        )
+    }
+}

