atlas-commits mailing list archives

From jma...@apache.org
Subject [14/51] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date Sun, 14 Jun 2015 17:44:54 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructType.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructType.java
deleted file mode 100755
index d24ab6b..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructType.java
+++ /dev/null
@@ -1,204 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.IStruct;
-import org.apache.hadoop.metadata.typesystem.ITypedStruct;
-
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
-public class StructType extends AbstractDataType<IStruct>
-        implements IConstructableType<IStruct, ITypedStruct> {
-
-    public final TypeSystem typeSystem;
-    public final String name;
-    public final FieldMapping fieldMapping;
-    public final Map<AttributeInfo, List<String>> infoToNameMap;
-    public final int numFields;
-    private final TypedStructHandler handler;
-
-    /**
-     * Used when creating a StructType, to support recursive Structs.
-     */
-    protected StructType(TypeSystem typeSystem, String name, int numFields) {
-        this.typeSystem = typeSystem;
-        this.name = name;
-        this.fieldMapping = null;
-        infoToNameMap = null;
-        this.numFields = numFields;
-        this.handler = null;
-    }
-
-    protected StructType(TypeSystem typeSystem, String name,
-                         ImmutableList<String> superTypes, AttributeInfo... fields)
-    throws MetadataException {
-        this.typeSystem = typeSystem;
-        this.name = name;
-        this.fieldMapping = constructFieldMapping(superTypes,
-                fields);
-        infoToNameMap = TypeUtils.buildAttrInfoToNameMap(this.fieldMapping);
-        this.numFields = this.fieldMapping.fields.size();
-        this.handler = new TypedStructHandler(this);
-    }
-
-    public FieldMapping fieldMapping() {
-        return fieldMapping;
-    }
-
-    @Override
-    public String getName() {
-        return name;
-    }
-
-    protected FieldMapping constructFieldMapping(ImmutableList<String> superTypes,
-                                                 AttributeInfo... fields)
-    throws MetadataException {
-
-        Map<String, AttributeInfo> fieldsMap = new LinkedHashMap<String, AttributeInfo>();
-        Map<String, Integer> fieldPos = new HashMap<String, Integer>();
-        Map<String, Integer> fieldNullPos = new HashMap<String, Integer>();
-        int numBools = 0;
-        int numBytes = 0;
-        int numShorts = 0;
-        int numInts = 0;
-        int numLongs = 0;
-        int numFloats = 0;
-        int numDoubles = 0;
-        int numBigInts = 0;
-        int numBigDecimals = 0;
-        int numDates = 0;
-        int numStrings = 0;
-        int numArrays = 0;
-        int numMaps = 0;
-        int numStructs = 0;
-        int numReferenceables = 0;
-
-        for (AttributeInfo i : fields) {
-            if (fieldsMap.containsKey(i.name)) {
-                throw new MetadataException(
-                        String.format(
-                                "Struct definition cannot contain multiple fields with the same " +
-                                        "name %s",
-                                i.name));
-            }
-            fieldsMap.put(i.name, i);
-            fieldNullPos.put(i.name, fieldNullPos.size());
-            if (i.dataType() == DataTypes.BOOLEAN_TYPE) {
-                fieldPos.put(i.name, numBools);
-                numBools++;
-            } else if (i.dataType() == DataTypes.BYTE_TYPE) {
-                fieldPos.put(i.name, numBytes);
-                numBytes++;
-            } else if (i.dataType() == DataTypes.SHORT_TYPE) {
-                fieldPos.put(i.name, numShorts);
-                numShorts++;
-            } else if (i.dataType() == DataTypes.INT_TYPE) {
-                fieldPos.put(i.name, numInts);
-                numInts++;
-            } else if (i.dataType() == DataTypes.LONG_TYPE) {
-                fieldPos.put(i.name, numLongs);
-                numLongs++;
-            } else if (i.dataType() == DataTypes.FLOAT_TYPE) {
-                fieldPos.put(i.name, numFloats);
-                numFloats++;
-            } else if (i.dataType() == DataTypes.DOUBLE_TYPE) {
-                fieldPos.put(i.name, numDoubles);
-                numDoubles++;
-            } else if (i.dataType() == DataTypes.BIGINTEGER_TYPE) {
-                fieldPos.put(i.name, numBigInts);
-                numBigInts++;
-            } else if (i.dataType() == DataTypes.BIGDECIMAL_TYPE) {
-                fieldPos.put(i.name, numBigDecimals);
-                numBigDecimals++;
-            } else if (i.dataType() == DataTypes.DATE_TYPE) {
-                fieldPos.put(i.name, numDates);
-                numDates++;
-            } else if (i.dataType() == DataTypes.STRING_TYPE) {
-                fieldPos.put(i.name, numStrings);
-                numStrings++;
-            } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ENUM) {
-                fieldPos.put(i.name, numInts);
-                numInts++;
-            } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
-                fieldPos.put(i.name, numArrays);
-                numArrays++;
-            } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
-                fieldPos.put(i.name, numMaps);
-                numMaps++;
-            } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.STRUCT ||
-                    i.dataType().getTypeCategory() == DataTypes.TypeCategory.TRAIT) {
-                fieldPos.put(i.name, numStructs);
-                numStructs++;
-            } else if (i.dataType().getTypeCategory() == DataTypes.TypeCategory.CLASS) {
-                fieldPos.put(i.name, numReferenceables);
-                numReferenceables++;
-            } else {
-                throw new MetadataException(String.format("Unknown datatype %s", i.dataType()));
-            }
-        }
-
-        return new FieldMapping(fieldsMap,
-                fieldPos,
-                fieldNullPos,
-                numBools,
-                numBytes,
-                numShorts,
-                numInts,
-                numLongs,
-                numFloats,
-                numDoubles,
-                numBigInts,
-                numBigDecimals,
-                numDates,
-                numStrings,
-                numArrays,
-                numMaps,
-                numStructs,
-                numReferenceables);
-    }
-
-
-    @Override
-    public DataTypes.TypeCategory getTypeCategory() {
-        return DataTypes.TypeCategory.STRUCT;
-    }
-
-    @Override
-    public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
-        return handler.convert(val, m);
-    }
-
-    public ITypedStruct createInstance() {
-        return handler.createInstance();
-    }
-
-    @Override
-    public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
-        handler.output(s, buf, prefix);
-    }
-
-    public List<String> getNames(AttributeInfo info) {
-        return infoToNameMap.get(info);
-    }
-}
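
For context, a minimal hypothetical sketch of how the StructType removed above could be exercised through TypeSystem (also refactored in this commit). The type name "t_address" and its attributes are invented for illustration; only the signatures come from the sources shown in this diff.

    import org.apache.hadoop.metadata.MetadataException;
    import org.apache.hadoop.metadata.typesystem.ITypedStruct;
    import org.apache.hadoop.metadata.typesystem.types.*;

    public class StructTypeExample {
        public static void main(String[] args) throws MetadataException {
            TypeSystem ts = TypeSystem.getInstance();

            // Register a struct type with two attributes (names are illustrative only).
            StructType addressType = ts.defineStructType("t_address", true,
                    new AttributeDefinition("street", DataTypes.STRING_TYPE.getName(),
                            Multiplicity.REQUIRED, false, null),
                    new AttributeDefinition("zip", DataTypes.INT_TYPE.getName(),
                            Multiplicity.REQUIRED, false, null));

            // createInstance() delegates to the TypedStructHandler shown further down in this commit.
            ITypedStruct address = addressType.createInstance();
            address.set("street", "Main St");
            address.set("zip", 95054);

            // output() renders the instance through the type's FieldMapping.
            StringBuilder buf = new StringBuilder();
            addressType.output(address, buf, "");
            System.out.println(buf);
        }
    }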

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructTypeDefinition.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructTypeDefinition.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructTypeDefinition.java
deleted file mode 100755
index 251de4f..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/StructTypeDefinition.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import org.apache.hadoop.metadata.ParamChecker;
-
-import java.util.Arrays;
-
-public class StructTypeDefinition {
-
-    public final String typeName;
-    public final AttributeDefinition[] attributeDefinitions;
-
-    protected StructTypeDefinition(String typeName, boolean validate, AttributeDefinition... attributeDefinitions) {
-        this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
-        if (attributeDefinitions != null && attributeDefinitions.length != 0) {
-            ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
-        }
-        this.attributeDefinitions = attributeDefinitions;
-    }
-
-    public StructTypeDefinition(String typeName, AttributeDefinition[] attributeDefinitions) {
-        this.typeName = ParamChecker.notEmpty(typeName, "Struct type name");
-        this.attributeDefinitions = ParamChecker.notNullElements(attributeDefinitions, "Attribute definitions");
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        StructTypeDefinition that = (StructTypeDefinition) o;
-
-        if (!Arrays.equals(attributeDefinitions, that.attributeDefinitions)) return false;
-        if (!typeName.equals(that.typeName)) return false;
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        int result = typeName.hashCode();
-        result = 31 * result + Arrays.hashCode(attributeDefinitions);
-        return result;
-    }
-}
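
A StructTypeDefinition on its own is only a description; it takes effect when passed to TypeSystem.defineTypes. A small hypothetical sketch (the "t_person" name and its attribute are illustrative), assuming the type is not already registered:

    import com.google.common.collect.ImmutableList;
    import org.apache.hadoop.metadata.MetadataException;
    import org.apache.hadoop.metadata.typesystem.types.*;

    public class StructTypeDefinitionExample {
        public static void main(String[] args) throws MetadataException {
            StructTypeDefinition personDef = new StructTypeDefinition("t_person",
                    new AttributeDefinition[]{
                            new AttributeDefinition("fullName", DataTypes.STRING_TYPE.getName(),
                                    Multiplicity.REQUIRED, false, null)});

            TypeSystem ts = TypeSystem.getInstance();
            // Only the struct list is populated here; trait and class definition lists are empty.
            ts.defineTypes(ImmutableList.of(personDef),
                    ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
                    ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());

            StructType personType = ts.getDataType(StructType.class, personDef.typeName);
            System.out.println(personType.getName());
        }
    }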

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TraitType.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TraitType.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TraitType.java
deleted file mode 100755
index 87181ed..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TraitType.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.IStruct;
-import org.apache.hadoop.metadata.typesystem.ITypedStruct;
-
-import java.util.List;
-import java.util.Map;
-
-public class TraitType extends HierarchicalType<TraitType, IStruct>
-        implements IConstructableType<IStruct, ITypedStruct> {
-
-    public final Map<AttributeInfo, List<String>> infoToNameMap;
-    private final TypedStructHandler handler;
-
-    /**
-     * Used when creating a TraitType, to support recursive Structs.
-     */
-    TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits,
-              int numFields) {
-        super(typeSystem, TraitType.class, name, superTraits, numFields);
-        handler = null;
-        infoToNameMap = null;
-    }
-
-    TraitType(TypeSystem typeSystem, String name, ImmutableList<String> superTraits,
-              AttributeInfo... fields)
-    throws MetadataException {
-        super(typeSystem, TraitType.class, name, superTraits, fields);
-        handler = new TypedStructHandler(this);
-        infoToNameMap = TypeUtils.buildAttrInfoToNameMap(fieldMapping);
-    }
-
-    @Override
-    public DataTypes.TypeCategory getTypeCategory() {
-        return DataTypes.TypeCategory.TRAIT;
-    }
-
-    @Override
-    public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
-        return handler.convert(val, m);
-    }
-
-    public ITypedStruct createInstance() {
-        return handler.createInstance();
-    }
-
-    @Override
-    public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
-        handler.output(s, buf, prefix);
-    }
-
-    @Override
-    public List<String> getNames(AttributeInfo info) {
-        return infoToNameMap.get(info);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeSystem.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeSystem.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeSystem.java
deleted file mode 100755
index f44b0bb..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeSystem.java
+++ /dev/null
@@ -1,674 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Multimap;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.classification.InterfaceAudience;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-
-import javax.inject.Singleton;
-import java.lang.reflect.Constructor;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.ConcurrentHashMap;
-
-@Singleton
-@InterfaceAudience.Private
-public class TypeSystem {
-    private static final TypeSystem INSTANCE = new TypeSystem();
-    private static ThreadLocal<SimpleDateFormat> dateFormat = new ThreadLocal() {
-        @Override
-        public SimpleDateFormat initialValue() {
-            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX");
-            dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
-            return dateFormat;
-        }
-    };
-
-    private Map<String, IDataType> types;
-    private IdType idType;
-
-    /**
-     * An in-memory copy of type categories vs types for convenience.
-     */
-    private Multimap<DataTypes.TypeCategory, String> typeCategoriesToTypeNamesMap;
-
-    private ImmutableList<String> coreTypes;
-
-    public TypeSystem() {
-        initialize();
-    }
-
-    public static TypeSystem getInstance() {
-        return INSTANCE;
-    }
-
-    /**
-     * This is only used for testing purposes. Not intended for public use.
-     */
-    @InterfaceAudience.Private
-    public void reset() {
-        initialize();
-    }
-
-    private void initialize() {
-        types = new ConcurrentHashMap<>();
-        typeCategoriesToTypeNamesMap = ArrayListMultimap.create(DataTypes.TypeCategory.values().length, 10);
-
-        registerPrimitiveTypes();
-        registerCoreTypes();
-        coreTypes = ImmutableList.copyOf(types.keySet());
-    }
-
-    public ImmutableList<String> getCoreTypes() {
-        return coreTypes;
-    }
-
-    public ImmutableList<String> getTypeNames() {
-        return ImmutableList.copyOf(types.keySet());
-    }
-
-    public ImmutableList<String> getTypeNamesByCategory(DataTypes.TypeCategory typeCategory) {
-        return ImmutableList.copyOf(typeCategoriesToTypeNamesMap.get(typeCategory));
-    }
-
-    private void registerPrimitiveTypes() {
-        types.put(DataTypes.BOOLEAN_TYPE.getName(), DataTypes.BOOLEAN_TYPE);
-        types.put(DataTypes.BYTE_TYPE.getName(), DataTypes.BYTE_TYPE);
-        types.put(DataTypes.SHORT_TYPE.getName(), DataTypes.SHORT_TYPE);
-        types.put(DataTypes.INT_TYPE.getName(), DataTypes.INT_TYPE);
-        types.put(DataTypes.LONG_TYPE.getName(), DataTypes.LONG_TYPE);
-        types.put(DataTypes.FLOAT_TYPE.getName(), DataTypes.FLOAT_TYPE);
-        types.put(DataTypes.DOUBLE_TYPE.getName(), DataTypes.DOUBLE_TYPE);
-        types.put(DataTypes.BIGINTEGER_TYPE.getName(), DataTypes.BIGINTEGER_TYPE);
-        types.put(DataTypes.BIGDECIMAL_TYPE.getName(), DataTypes.BIGDECIMAL_TYPE);
-        types.put(DataTypes.DATE_TYPE.getName(), DataTypes.DATE_TYPE);
-        types.put(DataTypes.STRING_TYPE.getName(), DataTypes.STRING_TYPE);
-
-        typeCategoriesToTypeNamesMap.putAll(DataTypes.TypeCategory.PRIMITIVE, types.keySet());
-    }
-
-
-    /*
-     * The only core OOB type we will define is the Struct to represent the Identity of an Instance.
-     */
-    private void registerCoreTypes() {
-        idType = new IdType();
-    }
-
-    public IdType getIdType() {
-        return idType;
-    }
-
-    public boolean isRegistered(String typeName) {
-        return types.containsKey(typeName);
-    }
-
-    public <T> T getDataType(Class<T> cls, String name) throws MetadataException {
-        if (types.containsKey(name)) {
-            try {
-                return cls.cast(types.get(name));
-            } catch(ClassCastException cce) {
-                throw new MetadataException(cce);
-            }
-        }
-
-        /*
-         * is this an Array Type?
-         */
-        String arrElemType = TypeUtils.parseAsArrayType(name);
-        if (arrElemType != null) {
-            IDataType dT = defineArrayType(getDataType(IDataType.class, arrElemType));
-            return cls.cast(dT);
-        }
-
-        /*
-         * is this a Map Type?
-         */
-        String[] mapType = TypeUtils.parseAsMapType(name);
-        if (mapType != null) {
-            IDataType dT = defineMapType(getDataType(IDataType.class, mapType[0]),
-                    getDataType(IDataType.class, mapType[1]));
-            return cls.cast(dT);
-        }
-
-        throw new MetadataException(String.format("Unknown datatype: %s", name));
-    }
-
-    public StructType defineStructType(String name,
-                                       boolean errorIfExists,
-                                       AttributeDefinition... attrDefs) throws MetadataException {
-        StructTypeDefinition structDef = new StructTypeDefinition(name, attrDefs);
-        defineTypes(ImmutableList.of(structDef),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-
-        return getDataType(StructType.class, structDef.typeName);
-    }
-
-    /**
-     * construct a temporary StructType for a Query Result. This is not registered in the
-     * typeSystem.
-     * The attributes in the typeDefinition can only reference permanent types.
-     * @param name     struct type name
-     * @param attrDefs struct type definition
-     * @return temporary struct type
-     * @throws MetadataException
-     */
-    public StructType defineQueryResultType(String name,
-                                            Map<String, IDataType> tempTypes,
-                                            AttributeDefinition... attrDefs)
-            throws MetadataException {
-
-        AttributeInfo[] infos = new AttributeInfo[attrDefs.length];
-        for (int i = 0; i < attrDefs.length; i++) {
-            infos[i] = new AttributeInfo(this, attrDefs[i], tempTypes);
-        }
-
-        return new StructType(this, name, null, infos);
-    }
-
-    public TraitType defineTraitType(HierarchicalTypeDefinition<TraitType> traitDef)
-    throws MetadataException {
-
-        defineTypes(ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.of(traitDef),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-
-        return getDataType(TraitType.class, traitDef.typeName);
-    }
-
-    public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef)
-    throws MetadataException {
-
-        defineTypes(ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
-                ImmutableList.of(classDef));
-
-        return getDataType(ClassType.class, classDef.typeName);
-    }
-
-    public Map<String, IDataType> defineTraitTypes(
-            HierarchicalTypeDefinition<TraitType>... traitDefs) throws MetadataException {
-        TransientTypeSystem transientTypes = new TransientTypeSystem(
-                ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.copyOf(traitDefs),
-                ImmutableList.<HierarchicalTypeDefinition<ClassType>>of());
-        return transientTypes.defineTypes();
-    }
-
-    public Map<String, IDataType> defineClassTypes(
-            HierarchicalTypeDefinition<ClassType>... classDefs) throws MetadataException {
-        TransientTypeSystem transientTypes = new TransientTypeSystem(
-                ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(),
-                ImmutableList.copyOf(classDefs));
-        return transientTypes.defineTypes();
-    }
-
-    public Map<String, IDataType> defineTypes(TypesDef typesDef)
-    throws MetadataException {
-        Map<String, IDataType> typesAdded = new HashMap<>();
-        for (EnumTypeDefinition enumDef : typesDef.enumTypesAsJavaList()) {
-            typesAdded.put(enumDef.name, defineEnumType(enumDef));
-        }
-
-        ImmutableList<StructTypeDefinition> structDefs = ImmutableList
-                .copyOf(typesDef.structTypesAsJavaList());
-        ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs =
-                ImmutableList.copyOf(typesDef.traitTypesAsJavaList());
-        ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs =
-                ImmutableList.copyOf(typesDef.classTypesAsJavaList());
-
-        typesAdded.putAll(defineTypes(structDefs, traitDefs, classDefs));
-        return typesAdded;
-    }
-
-    public Map<String, IDataType> defineTypes(ImmutableList<StructTypeDefinition> structDefs,
-                                              ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
-                                              ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs)
-    throws MetadataException {
-        TransientTypeSystem transientTypes = new TransientTypeSystem(structDefs,
-                traitDefs,
-                classDefs);
-        Map<String, IDataType> definedTypes = transientTypes.defineTypes();
-//        LOG.debug("Defined new types " + Arrays.toString(definedTypes.keySet().toArray(new String[definedTypes.size()])));
-        return definedTypes;
-    }
-
-    public DataTypes.ArrayType defineArrayType(IDataType elemType) throws MetadataException {
-        assert elemType != null;
-        DataTypes.ArrayType dT = new DataTypes.ArrayType(elemType);
-        types.put(dT.getName(), dT);
-        typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ARRAY, dT.getName());
-        return dT;
-    }
-
-    public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType)
-    throws MetadataException {
-        assert keyType != null;
-        assert valueType != null;
-        DataTypes.MapType dT = new DataTypes.MapType(keyType, valueType);
-        types.put(dT.getName(), dT);
-        typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.MAP, dT.getName());
-        return dT;
-    }
-
-    public EnumType defineEnumType(String name, EnumValue... values) throws MetadataException {
-        return defineEnumType(new EnumTypeDefinition(name, values));
-    }
-
-    public EnumType defineEnumType(EnumTypeDefinition eDef) throws MetadataException {
-        assert eDef.name != null;
-        if (types.containsKey(eDef.name)) {
-            throw new MetadataException(
-                    String.format("Redefinition of type %s not supported", eDef.name));
-        }
-
-        EnumType eT = new EnumType(this, eDef.name, eDef.enumValues);
-        types.put(eDef.name, eT);
-        typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.ENUM, eDef.name);
-        return eT;
-    }
-
-    public SimpleDateFormat getDateFormat() {
-        return dateFormat.get();
-    }
-
-    public boolean allowNullsInCollections() {
-        return false;
-    }
-
-    public void removeTypes(ImmutableList<String> typeNames) {
-
-    }
-
-    class TransientTypeSystem extends TypeSystem {
-
-        final ImmutableList<StructTypeDefinition> structDefs;
-        final ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs;
-        final ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs;
-        Map<String, StructTypeDefinition> structNameToDefMap = new HashMap<>();
-        Map<String, HierarchicalTypeDefinition<TraitType>> traitNameToDefMap =
-                new HashMap<>();
-        Map<String, HierarchicalTypeDefinition<ClassType>> classNameToDefMap =
-                new HashMap<>();
-
-        Set<String> transientTypes;
-
-        List<AttributeInfo> recursiveRefs;
-        List<DataTypes.ArrayType> recursiveArrayTypes;
-        List<DataTypes.MapType> recursiveMapTypes;
-
-
-        TransientTypeSystem(ImmutableList<StructTypeDefinition> structDefs,
-                            ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
-                            ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs) {
-
-            this.structDefs = structDefs;
-            this.traitDefs = traitDefs;
-            this.classDefs = classDefs;
-            structNameToDefMap = new HashMap<>();
-            traitNameToDefMap = new HashMap<>();
-            classNameToDefMap = new HashMap<>();
-
-            recursiveRefs = new ArrayList<>();
-            recursiveArrayTypes = new ArrayList<>();
-            recursiveMapTypes = new ArrayList<>();
-            transientTypes = new LinkedHashSet<>();
-        }
-
-        private IDataType dataType(String name) {
-            return TypeSystem.this.types.get(name);
-        }
-
-        /*
-         * Step 1:
-         * - validate cannot redefine types
-         * - setup shallow Type instances to facilitate recursive type graphs
-         */
-        private void step1() throws MetadataException {
-            for (StructTypeDefinition sDef : structDefs) {
-                assert sDef.typeName != null;
-                if (dataType(sDef.typeName) != null) {
-                    throw new MetadataException(
-                            String.format("Cannot redefine type %s", sDef.typeName));
-                }
-                TypeSystem.this.types.put(sDef.typeName,
-                        new StructType(this, sDef.typeName, sDef.attributeDefinitions.length));
-                structNameToDefMap.put(sDef.typeName, sDef);
-                transientTypes.add(sDef.typeName);
-            }
-
-            for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
-                assert traitDef.typeName != null;
-                if (types.containsKey(traitDef.typeName)) {
-                    throw new MetadataException(
-                            String.format("Cannot redefine type %s", traitDef.typeName));
-                }
-
-                TypeSystem.this.types.put(traitDef.typeName,
-                        new TraitType(this, traitDef.typeName, traitDef.superTypes,
-                                traitDef.attributeDefinitions.length));
-                traitNameToDefMap.put(traitDef.typeName, traitDef);
-                transientTypes.add(traitDef.typeName);
-            }
-
-            for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
-                assert classDef.typeName != null;
-                if (types.containsKey(classDef.typeName)) {
-                    throw new MetadataException(
-                            String.format("Cannot redefine type %s", classDef.typeName));
-                }
-
-                TypeSystem.this.types.put(classDef.typeName,
-                        new ClassType(this, classDef.typeName, classDef.superTypes,
-                                classDef.attributeDefinitions.length));
-                classNameToDefMap.put(classDef.typeName, classDef);
-                transientTypes.add(classDef.typeName);
-            }
-        }
-
-        private <U extends HierarchicalType> void validateSuperTypes(Class<U> cls,
-                                                                     HierarchicalTypeDefinition<U> def)
-        throws MetadataException {
-            Set<String> s = new HashSet<>();
-            ImmutableList<String> superTypes = def.superTypes;
-            for (String superTypeName : superTypes) {
-
-                if (s.contains(superTypeName)) {
-                    throw new MetadataException(
-                            String.format("Type %s extends superType %s multiple times",
-                                    def.typeName, superTypeName));
-                }
-
-                IDataType dT = dataType(superTypeName);
-
-                if (dT == null) {
-                    throw new MetadataException(
-                            String.format("Unknown superType %s in definition of type %s",
-                                    superTypeName, def.typeName));
-                }
-
-                if (!cls.isAssignableFrom(dT.getClass())) {
-                    throw new MetadataException(
-                            String.format("SuperType %s must be a %s, in definition of type %s",
-                                    superTypeName, cls.getName(), def.typeName));
-                }
-                s.add(superTypeName);
-            }
-        }
-
-        /*
-         * Step 2:
-         * - for Hierarchical Types, validate SuperTypes.
-         * - for each Hierarchical Type setup their SuperTypes Graph
-         */
-        private void step2() throws MetadataException {
-            for (HierarchicalTypeDefinition<TraitType> traitDef : traitDefs) {
-                validateSuperTypes(TraitType.class, traitDef);
-                TraitType traitType = getDataType(TraitType.class, traitDef.typeName);
-                traitType.setupSuperTypesGraph();
-            }
-
-            for (HierarchicalTypeDefinition<ClassType> classDef : classDefs) {
-                validateSuperTypes(ClassType.class, classDef);
-                ClassType classType = getDataType(ClassType.class, classDef.typeName);
-                classType.setupSuperTypesGraph();
-            }
-        }
-
-        private AttributeInfo constructAttributeInfo(AttributeDefinition attrDef)
-        throws MetadataException {
-            AttributeInfo info = new AttributeInfo(this, attrDef, null);
-            if (transientTypes.contains(attrDef.dataTypeName)) {
-                recursiveRefs.add(info);
-            }
-            if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY) {
-                DataTypes.ArrayType arrType = (DataTypes.ArrayType) info.dataType();
-                if (transientTypes.contains(arrType.getElemType().getName())) {
-                    recursiveArrayTypes.add(arrType);
-                }
-            }
-            if (info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP) {
-                DataTypes.MapType mapType = (DataTypes.MapType) info.dataType();
-                if (transientTypes.contains(mapType.getKeyType().getName())) {
-                    recursiveMapTypes.add(mapType);
-                } else if (transientTypes.contains(mapType.getValueType().getName())) {
-                    recursiveMapTypes.add(mapType);
-                }
-            }
-
-            if (info.multiplicity.upper > 1 && !(
-                    info.dataType().getTypeCategory() == DataTypes.TypeCategory.MAP ||
-                            info.dataType().getTypeCategory() == DataTypes.TypeCategory.ARRAY)) {
-                throw new MetadataException(
-                        String.format("A multiplicity of more than one requires a collection type for attribute '%s'",
-                                info.name));
-            }
-
-            return info;
-        }
-
-        private StructType constructStructureType(StructTypeDefinition def)
-        throws MetadataException {
-            AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
-            for (int i = 0; i < def.attributeDefinitions.length; i++) {
-                infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
-            }
-
-            StructType type = new StructType(TypeSystem.this, def.typeName, null, infos);
-            TypeSystem.this.types.put(def.typeName, type);
-            return type;
-        }
-
-        private <U extends HierarchicalType> U constructHierarchicalType(Class<U> cls,
-                                                                         HierarchicalTypeDefinition<U> def)
-        throws MetadataException {
-            AttributeInfo[] infos = new AttributeInfo[def.attributeDefinitions.length];
-            for (int i = 0; i < def.attributeDefinitions.length; i++) {
-                infos[i] = constructAttributeInfo(def.attributeDefinitions[i]);
-            }
-
-            try {
-                Constructor<U> cons = cls.getDeclaredConstructor(
-                        TypeSystem.class,
-                        String.class,
-                        ImmutableList.class,
-                        AttributeInfo[].class);
-                U type = cons.newInstance(TypeSystem.this, def.typeName, def.superTypes, infos);
-                TypeSystem.this.types.put(def.typeName, type);
-                return type;
-            } catch (Exception e) {
-                throw new MetadataException(
-                        String.format("Cannot construct Type of MetaType %s", cls.getName()), e);
-            }
-        }
-
-        /*
-         * Step 3:
-         * - Order Hierarchical Types in order of SuperType before SubType.
-         * - Construct all the Types
-         */
-        private void step3() throws MetadataException {
-
-            List<TraitType> traitTypes = new ArrayList<>();
-            for (String traitTypeName : traitNameToDefMap.keySet()) {
-                traitTypes.add(getDataType(TraitType.class, traitTypeName));
-            }
-            Collections.sort(traitTypes);
-
-            List<ClassType> classTypes = new ArrayList<>();
-            for (String classTypeName : classNameToDefMap.keySet()) {
-                classTypes.add(getDataType(ClassType.class, classTypeName));
-            }
-            Collections.sort(classTypes);
-
-            for (StructTypeDefinition structDef : structDefs) {
-                constructStructureType(structDef);
-                typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.CLASS, structDef.typeName);
-            }
-
-            for (TraitType traitType : traitTypes) {
-                constructHierarchicalType(TraitType.class,
-                        traitNameToDefMap.get(traitType.getName()));
-                typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.TRAIT, traitType.getName());
-            }
-
-            for (ClassType classType : classTypes) {
-                constructHierarchicalType(ClassType.class,
-                        classNameToDefMap.get(classType.getName()));
-                typeCategoriesToTypeNamesMap.put(DataTypes.TypeCategory.CLASS, classType.getName());
-            }
-        }
-
-        /*
-         * Step 4:
-         * - fix up references in recursive AttrInfo and recursive Collection Types.
-         */
-        private void step4() throws MetadataException {
-            for (AttributeInfo info : recursiveRefs) {
-                info.setDataType(dataType(info.dataType().getName()));
-            }
-            for (DataTypes.ArrayType arrType : recursiveArrayTypes) {
-                arrType.setElemType(dataType(arrType.getElemType().getName()));
-            }
-            for (DataTypes.MapType mapType : recursiveMapTypes) {
-                mapType.setKeyType(dataType(mapType.getKeyType().getName()));
-                mapType.setValueType(dataType(mapType.getValueType().getName()));
-            }
-        }
-
-        Map<String, IDataType> defineTypes() throws MetadataException {
-            step1();
-            step2();
-            try {
-                step3();
-                step4();
-            } catch (MetadataException me) {
-                for (String sT : transientTypes) {
-                    types.remove(sT);
-                }
-                throw me;
-            }
-
-            Map<String, IDataType> newTypes = new HashMap<>();
-
-            for (String tName : transientTypes) {
-                newTypes.put(tName, dataType(tName));
-            }
-            return newTypes;
-        }
-
-        @Override
-        public ImmutableList<String> getTypeNames() {
-            return TypeSystem.this.getTypeNames();
-        }
-
-        @Override
-        public <T> T getDataType(Class<T> cls, String name) throws MetadataException {
-            return TypeSystem.this.getDataType(cls, name);
-        }
-
-        @Override
-        public StructType defineStructType(String name, boolean errorIfExists,
-                                           AttributeDefinition... attrDefs)
-        throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-
-        @Override
-        public TraitType defineTraitType(HierarchicalTypeDefinition traitDef)
-        throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-
-        @Override
-        public ClassType defineClassType(HierarchicalTypeDefinition<ClassType> classDef
-        ) throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-
-        @Override
-        public Map<String, IDataType> defineTypes(ImmutableList<StructTypeDefinition> structDefs,
-                                                  ImmutableList<HierarchicalTypeDefinition<TraitType>> traitDefs,
-                                                  ImmutableList<HierarchicalTypeDefinition<ClassType>> classDefs)
-        throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-
-        @Override
-        public DataTypes.ArrayType defineArrayType(IDataType elemType) throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-
-        @Override
-        public DataTypes.MapType defineMapType(IDataType keyType, IDataType valueType)
-        throws MetadataException {
-            throw new MetadataException("Internal Error: define type called on TransientTypeSystem");
-        }
-    }
-
-    public class IdType {
-        private static final String ID_ATTRNAME = "guid";
-        private static final String  TYPENAME_ATTRNAME = "typeName";
-        private static final String  TYP_NAME = "__IdType";
-
-        private IdType() {
-            AttributeDefinition idAttr = new AttributeDefinition(ID_ATTRNAME,
-                    DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
-            AttributeDefinition typNmAttr =
-                    new AttributeDefinition(TYPENAME_ATTRNAME,
-                            DataTypes.STRING_TYPE.getName(), Multiplicity.REQUIRED, false, null);
-            try {
-                AttributeInfo[] infos = new AttributeInfo[2];
-                infos[0] = new AttributeInfo(TypeSystem.this, idAttr, null);
-                infos[1] = new AttributeInfo(TypeSystem.this, typNmAttr, null);
-
-                StructType type = new StructType(TypeSystem.this, TYP_NAME, null, infos);
-                TypeSystem.this.types.put(TYP_NAME, type);
-
-            } catch (MetadataException me) {
-                throw new RuntimeException(me);
-            }
-        }
-
-        public StructType getStructType() throws MetadataException {
-            return getDataType(StructType.class, TYP_NAME);
-        }
-
-        public String getName() { return TYP_NAME; }
-        public String idAttrName() { return ID_ATTRNAME;}
-        public String typeNameAttrName() { return TYPENAME_ATTRNAME;}
-    }
-
-    public static final String ID_STRUCT_ID_ATTRNAME = IdType.ID_ATTRNAME;
-}
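
The TypeSystem removed above is the registry everything else goes through. A minimal hypothetical sketch of two of its entry points, based only on the signatures in this file (the enum name and values mirror the example in package-info.java below):

    import org.apache.hadoop.metadata.MetadataException;
    import org.apache.hadoop.metadata.typesystem.types.*;

    public class TypeSystemExample {
        public static void main(String[] args) throws MetadataException {
            TypeSystem ts = TypeSystem.getInstance();

            // Enum types are registered directly; redefining an existing name throws.
            EnumType hiveObjectType = ts.defineEnumType("HiveObjectType",
                    new EnumValue("GLOBAL", 1),
                    new EnumValue("DATABASE", 2));
            System.out.println(hiveObjectType.getName());

            // Array (and map) types are resolved lazily by getDataType() from their
            // type-name syntax, via TypeUtils.parseAsArrayType / parseAsMapType.
            IDataType intArray = ts.getDataType(IDataType.class,
                    "array<" + DataTypes.INT_TYPE.getName() + ">");
            System.out.println(intArray.getName());
        }
    }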

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeUtils.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeUtils.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeUtils.java
deleted file mode 100755
index f494c4a..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypeUtils.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-import scala.collection.JavaConversions;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class TypeUtils {
-
-    public static final String NAME_REGEX = "[a-zA-Z][a-zA-Z0-9_]*";
-    public static final Pattern NAME_PATTERN = Pattern.compile(NAME_REGEX);
-    public static final Pattern ARRAY_TYPE_NAME_PATTERN = Pattern
-            .compile(String.format("array<(%s)>", NAME_REGEX));
-    public static final Pattern MAP_TYPE_NAME_PATTERN =
-            Pattern.compile(String.format("map<(%s),(%s)>", NAME_REGEX, NAME_REGEX));
-
-    public static void outputVal(String val, Appendable buf, String prefix)
-    throws MetadataException {
-        try {
-            buf.append(prefix).append(val);
-        } catch (IOException ie) {
-            throw new MetadataException(ie);
-        }
-    }
-
-    public static String parseAsArrayType(String typeName) {
-        Matcher m = ARRAY_TYPE_NAME_PATTERN.matcher(typeName);
-        return m.matches() ? m.group(1) : null;
-    }
-
-    public static String[] parseAsMapType(String typeName) {
-        Matcher m = MAP_TYPE_NAME_PATTERN.matcher(typeName);
-        return m.matches() ? new String[]{m.group(1), m.group(2)} : null;
-    }
-
-    public static Map<AttributeInfo, List<String>> buildAttrInfoToNameMap(FieldMapping f) {
-        Map<AttributeInfo, List<String>> b = new HashMap<AttributeInfo, List<String>>();
-        for (Map.Entry<String, AttributeInfo> e : f.fields.entrySet()) {
-            List<String> names = b.get(e.getValue());
-            if (names == null) {
-                names = new ArrayList<String>();
-                b.put(e.getValue(), names);
-            }
-            names.add(e.getKey());
-        }
-        return ImmutableMap.copyOf(b);
-    }
-
-    public static TypesDef getTypesDef(ImmutableList<EnumTypeDefinition> enums, ImmutableList<StructTypeDefinition> structs,
-                                         ImmutableList<HierarchicalTypeDefinition<TraitType>> traits,
-                                         ImmutableList<HierarchicalTypeDefinition<ClassType>> classes) {
-        return new TypesDef(JavaConversions.asScalaBuffer(enums), JavaConversions.asScalaBuffer(structs),
-                JavaConversions.asScalaBuffer(traits), JavaConversions.asScalaBuffer(classes));
-    }
-
-    protected static class Pair<L,R> {
-        protected L left;
-        protected R right;
-
-        public Pair(L left, R right) {
-            this.left = left;
-            this.right = right;
-        }
-    }
-}
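
The array/map type-name parsing used by TypeSystem.getDataType lives in the TypeUtils helpers above. A small hypothetical sketch of their behaviour as implied by the patterns:

    import org.apache.hadoop.metadata.typesystem.types.TypeUtils;

    public class TypeUtilsExample {
        public static void main(String[] args) {
            // Returns the element type name, or null when the name is not an array type.
            String elem = TypeUtils.parseAsArrayType("array<string>");   // -> "string"
            String notArray = TypeUtils.parseAsArrayType("string");      // -> null

            // Returns {keyTypeName, valueTypeName}, or null for non-map names.
            String[] kv = TypeUtils.parseAsMapType("map<string,int>");   // -> {"string", "int"}

            System.out.println(elem);
            System.out.println(notArray);
            System.out.println(kv[0] + " -> " + kv[1]);
        }
    }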

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java
deleted file mode 100755
index c0e1c56..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/TypedStructHandler.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import org.apache.hadoop.metadata.MetadataException;
-import org.apache.hadoop.metadata.typesystem.IStruct;
-import org.apache.hadoop.metadata.typesystem.ITypedStruct;
-import org.apache.hadoop.metadata.typesystem.Struct;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
-import org.apache.hadoop.metadata.typesystem.persistence.ReferenceableInstance;
-import org.apache.hadoop.metadata.typesystem.persistence.StructInstance;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Date;
-import java.util.Map;
-
-public class TypedStructHandler {
-
-    private final IConstructableType<IStruct, ITypedStruct> structType;
-    private final FieldMapping fieldMapping;
-
-    public TypedStructHandler(IConstructableType<IStruct, ITypedStruct> structType) {
-        this.structType = structType;
-        fieldMapping = structType.fieldMapping();
-    }
-
-    public ITypedStruct convert(Object val, Multiplicity m) throws MetadataException {
-        if (val != null) {
-            if (val instanceof ITypedStruct) {
-                ITypedStruct ts = (ITypedStruct) val;
-                if (ts.getTypeName() != structType.getName()) {
-                    throw new ValueConversionException(structType, val);
-                }
-                return ts;
-            } else if (val instanceof Struct) {
-                Struct s = (Struct) val;
-                if (!s.typeName.equals(structType.getName())) {
-                    throw new ValueConversionException(structType, val);
-                }
-                ITypedStruct ts = createInstance();
-                for (Map.Entry<String, AttributeInfo> e : fieldMapping.fields.entrySet()) {
-                    String attrKey = e.getKey();
-                    AttributeInfo i = e.getValue();
-                    Object aVal = s.get(attrKey);
-                    try {
-                        ts.set(attrKey, aVal);
-                    } catch (ValueConversionException ve) {
-                        throw new ValueConversionException(structType, val, ve);
-                    }
-                }
-                return ts;
-            } else if (val instanceof StructInstance &&
-                    ((StructInstance) val).getTypeName() == structType.getName()) {
-                return (StructInstance) val;
-            } else {
-                throw new ValueConversionException(structType, val);
-            }
-        }
-        if (!m.nullAllowed()) {
-            throw new ValueConversionException.NullConversionException(m);
-        }
-        return null;
-    }
-
-    public DataTypes.TypeCategory getTypeCategory() {
-        return DataTypes.TypeCategory.STRUCT;
-    }
-
-    public ITypedStruct createInstance() {
-        return new StructInstance(structType.getName(),
-                fieldMapping,
-                new boolean[fieldMapping.fields.size()],
-                fieldMapping.numBools == 0 ? null : new boolean[fieldMapping.numBools],
-                fieldMapping.numBytes == 0 ? null : new byte[fieldMapping.numBytes],
-                fieldMapping.numShorts == 0 ? null : new short[fieldMapping.numShorts],
-                fieldMapping.numInts == 0 ? null : new int[fieldMapping.numInts],
-                fieldMapping.numLongs == 0 ? null : new long[fieldMapping.numLongs],
-                fieldMapping.numFloats == 0 ? null : new float[fieldMapping.numFloats],
-                fieldMapping.numDoubles == 0 ? null : new double[fieldMapping.numDoubles],
-                fieldMapping.numBigDecimals == 0 ? null
-                        : new BigDecimal[fieldMapping.numBigDecimals],
-                fieldMapping.numBigInts == 0 ? null : new BigInteger[fieldMapping.numBigInts],
-                fieldMapping.numDates == 0 ? null : new Date[fieldMapping.numDates],
-                fieldMapping.numStrings == 0 ? null : new String[fieldMapping.numStrings],
-                fieldMapping.numArrays == 0 ? null : new ImmutableList[fieldMapping.numArrays],
-                fieldMapping.numMaps == 0 ? null : new ImmutableMap[fieldMapping.numMaps],
-                fieldMapping.numStructs == 0 ? null : new StructInstance[fieldMapping.numStructs],
-                fieldMapping.numReferenceables == 0 ? null
-                        : new ReferenceableInstance[fieldMapping.numReferenceables],
-                fieldMapping.numReferenceables == 0 ? null
-                        : new Id[fieldMapping.numReferenceables]);
-    }
-
-    public void output(IStruct s, Appendable buf, String prefix) throws MetadataException {
-        fieldMapping.output(s, buf, prefix);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ValueConversionException.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ValueConversionException.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ValueConversionException.java
deleted file mode 100755
index 031f8be..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/ValueConversionException.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types;
-
-import org.apache.hadoop.metadata.MetadataException;
-
-public class ValueConversionException extends MetadataException {
-
-    public ValueConversionException(IDataType typ, Object val) {
-        this(typ, val, (Throwable) null);
-    }
-
-    public ValueConversionException(IDataType typ, Object val, Throwable t) {
-        super(String
-                .format("Cannot convert value '%s' to datatype %s", val.toString(), typ.getName()),
-                t);
-    }
-
-    public ValueConversionException(IDataType typ, Object val, String msg) {
-        super(String.format("Cannot convert value '%s' to datatype %s because: %s",
-                val.toString(), typ.getName(), msg));
-    }
-
-    public ValueConversionException(String typeName, Object val, String msg) {
-        super(String.format("Cannot convert value '%s' to datatype %s because: %s",
-                val.toString(), typeName, msg));
-    }
-
-    protected ValueConversionException(String msg) {
-        super(msg);
-    }
-
-    public static class NullConversionException extends ValueConversionException {
-        public NullConversionException(Multiplicity m) {
-            super(String.format("Null value not allowed for multiplicty %s", m));
-        }
-
-    }
-}
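
[Editor's sketch] The constructors above fix the error-message format for failed conversions. A small hedged illustration (asInt is a hypothetical helper, not part of the module) of throwing and catching it:

  import org.apache.atlas.typesystem.types.{DataTypes, ValueConversionException}

  // a hypothetical validation helper that reuses the standard message format
  def asInt(value: AnyRef): Integer = value match {
    case i: Integer => i
    case other => throw new ValueConversionException(DataTypes.INT_TYPE, other, "expected an integer")
  }

  try asInt("forty-two")
  catch {
    case e: ValueConversionException =>
      // prints something like:
      // Cannot convert value 'forty-two' to datatype int because: expected an integer
      println(e.getMessage)
  }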

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/package-info.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/package-info.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/package-info.java
deleted file mode 100755
index 4cf9967..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/package-info.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * <h2>Types:</h2>
- * <img src="doc-files/dataTypes.png" />
- * <ul>
- *     <li> <b>IDataType:</b> Represents a <i>DataType</i> in the TypeSystem. All Instances and
- *     Attributes are associated
- *     with a DataType. They represent the <b>Set</b> of values that Instances/Attributes of this
- *     type can have.
- *     Currently the namespace of DataTypes is flat. DataTypes can be asked to <i>convert</i>
- *     arbitrary java Objects
- *     to instances of this type, and they can be asked for a String representation of an
- *     instance.</li>
- *     <li><b>Type Categories:</b></li> DataTypes are grouped into Categories. A Category implies
- *     certain semantics about
- *     the Types belonging to the Category. We have PRIMITIVE, ENUM, ARRAY, MAP, STRUCT, TRAIT,
- *     and CLASS categories.
- *     <li><b>Primitive Types:</b> There are corresponding DataTypes for the java primitives:
- *     Boolean, Byte, Short,
- *     Int, Long, Float, Double. We also support BigInteger, BigDecimal, String, and Date</li>
- *     <li><b>Collection Types:</b>ArrayType and MapType are parameterized DataTypes taking one
- *     and two parameters
- *     respectively.</li>
- *     <li><b>Enum Types:</b> Used to define DataTypes with all valid values listed in the Type
- *     definition. For e.g.
- * <pre>
- * {@code
- * ts.defineEnumType("HiveObjectType",
-new EnumValue("GLOBAL", 1),
-new EnumValue("DATABASE", 2),
-new EnumValue("TABLE", 3),
-new EnumValue("PARTITION", 4),
-new EnumValue("COLUMN", 5))
- * }
- * </pre> Each <i>EnumValue</i> has name and an ordinal. Either one can be used as a value for an
- * Attribute of this Type.
- *     </li>
- *     <li><b>Constructable Types:</b> Are complex Types that are composed of Attributes. We
- *     support Structs, Classes
- *     and Traits constructable types. A ConstructableType is parameterized by the Type of its
- *     <i>Instance</i> java
- *     class(these are implementations of the ITypedInstance interface). A value of the
- *     IConstructableType will
- *     implement this parameterized Type. IConstructableTypes can be asked to create an 'empty'
- *     instance of their Type.
- *     IConstructableTypes are associated with FieldMappings that encapsulate the mapping from/to
- *     the ITypedInstance
- *     java object.
- *     </li>
- *     <li><b>Attribute Info:</b>Represents an Attribute of a complex datatype. Attributes are
- *     defined by a name, a
- *     dataType, its Multiplicity and whether it is a composite relation. <i>Multiplicity</i> is
- *     a constraint on the
- *     number of instances that an instance can have. For non collection types and Maps:
- *     Multiplicity is OPTIONAL or
- *     REQUIRED.
- *     For Arrays the Multiplicity is specified by a lower-bound, upper-bound and a uniqueness
- *     constraint.
- *     </li>
- *     <li><b>Struct Types:</b>Are IConstructableTypes whose instances are IStructs. Conceptually
- *     these are like 'C'
- *     structs: they represent a collection of Attributes. For e.g.
- * <pre>
- * {@code
- * ts.defineStructType(STRUCT_TYPE_1,
-true,
-createRequiredAttrDef("a", DataTypes.INT_TYPE),
-createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE),
-createOptionalAttrDef("c", DataTypes.BYTE_TYPE),
-createOptionalAttrDef("d", DataTypes.SHORT_TYPE),
-createOptionalAttrDef("e", DataTypes.INT_TYPE),
-createOptionalAttrDef("f", DataTypes.INT_TYPE),
-createOptionalAttrDef("g", DataTypes.LONG_TYPE),
-createOptionalAttrDef("h", DataTypes.FLOAT_TYPE),
-createOptionalAttrDef("i", DataTypes.DOUBLE_TYPE),
-createOptionalAttrDef("j", DataTypes.BIGINTEGER_TYPE),
-createOptionalAttrDef("k", DataTypes.BIGDECIMAL_TYPE),
-createOptionalAttrDef("l", DataTypes.DATE_TYPE),
-createOptionalAttrDef("m", ts.defineArrayType(DataTypes.INT_TYPE)),
-createOptionalAttrDef("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)),
-createOptionalAttrDef("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))
- * }
- * </pre>
- *     </li>
- *     <li><b>Hierarchical Types:</b>Are DataTypes that can have a SuperType. Classes and Traits
- *     are the supported
- *     Hierarchical Types. </li>
- *     <li><b>Class Types:</b></li>
- *     <li><b>Trait Types:</b></li>
- * </ul>
- *
- *
- * <h2>Instances:</h2>
- * <img src="doc-files/instance.png" />
- * <ul>
- *     <li> <b>IStruct:</b></li>
- *     <li><b>IReferenceableInstance:</b></li>
- *     <li><b>ITypedStruct:</b></li>
- *     <li><b>ITypedReferenceableInstance:</b></li>
- * </ul>
- *
- * <h3>Serialization of Types:</h3>
- *
- * <h3>Serialization of Instances:</h3>
- *
- * <h3>Searching on Classes and Traits:</h3>
- */
-package org.apache.hadoop.metadata.typesystem.types;
\ No newline at end of file
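
[Editor's sketch] The "Attribute Info" and "Collection Types" bullets in the package javadoc above describe array attributes whose Multiplicity carries a lower bound, upper bound and uniqueness constraint. A small sketch of such an attribute definition, using only constructors visible in this commit (the "tags" attribute is hypothetical):

  import org.apache.atlas.typesystem.types._

  // an array of strings with 1..10 unique entries; not a composite relation,
  // no reverse attribute
  val tagsAttr = new AttributeDefinition(
    "tags",
    DataTypes.arrayTypeName(DataTypes.STRING_TYPE.getName),
    new Multiplicity(1, 10, true),
    false,
    null)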

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/utils/TypesUtil.java
----------------------------------------------------------------------
diff --git a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/utils/TypesUtil.java b/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/utils/TypesUtil.java
deleted file mode 100755
index 4c2d3d6..0000000
--- a/typesystem/src/main/java/org/apache/hadoop/metadata/typesystem/types/utils/TypesUtil.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.typesystem.types.utils;
-
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.EnumValue;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.IDataType;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-
-/**
- * Types utilities class.
- */
-public class TypesUtil {
-
-    private TypesUtil() {
-    }
-
-    public static AttributeDefinition createOptionalAttrDef(String name,
-                                                            IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(),
-                Multiplicity.OPTIONAL, false, null);
-    }
-
-    public static AttributeDefinition createOptionalAttrDef(String name,
-                                                            String dataType) {
-        return new AttributeDefinition(name, dataType, Multiplicity.OPTIONAL, false, null);
-    }
-
-    public static AttributeDefinition createRequiredAttrDef(String name,
-                                                            String dataType) {
-        return new AttributeDefinition(name, dataType, Multiplicity.REQUIRED, false, null);
-    }
-
-    public static AttributeDefinition createUniqueRequiredAttrDef(String name,
-                                                                  IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(),
-                Multiplicity.REQUIRED, false, true, true, null);
-    }
-
-    public static AttributeDefinition createRequiredAttrDef(String name,
-                                                            IDataType dataType) {
-        return new AttributeDefinition(name, dataType.getName(),
-                Multiplicity.REQUIRED, false, null);
-    }
-
-    public static EnumTypeDefinition createEnumTypeDef(String name, EnumValue... enumValues) {
-        return new EnumTypeDefinition(name, enumValues);
-    }
-
-    public static HierarchicalTypeDefinition<TraitType> createTraitTypeDef(
-            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
-        return new HierarchicalTypeDefinition<>(TraitType.class, name, superTypes, attrDefs);
-    }
-
-    public static StructTypeDefinition createStructTypeDef(String name, AttributeDefinition... attrDefs) {
-        return new StructTypeDefinition(name, attrDefs);
-    }
-
-    public static HierarchicalTypeDefinition<ClassType> createClassTypeDef(
-            String name, ImmutableList<String> superTypes, AttributeDefinition... attrDefs) {
-        return new HierarchicalTypeDefinition<>(ClassType.class, name, superTypes, attrDefs);
-    }
-}
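
[Editor's sketch] TypesUtil's helpers are the short-hand used throughout the tests and builders. A minimal sketch of a trait plus a class definition built with them (the "Dimension"/"Table"/"DataSet" names are hypothetical; packages are the post-refactor org.apache.atlas ones):

  import com.google.common.collect.ImmutableList
  import org.apache.atlas.typesystem.types.DataTypes
  import org.apache.atlas.typesystem.types.utils.TypesUtil._

  // a marker trait with no attributes
  val dimensionTrait = createTraitTypeDef("Dimension", ImmutableList.of[String]())

  // a class type with one super type, a unique required name and an optional description
  val tableClass = createClassTypeDef("Table", ImmutableList.of("DataSet"),
    createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE),
    createOptionalAttrDef("description", DataTypes.STRING_TYPE))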

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
new file mode 100755
index 0000000..b51048d
--- /dev/null
+++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/TypesDef.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.typesystem
+
+import org.apache.atlas.typesystem.types._
+
+case class TypesDef(enumTypes: Seq[EnumTypeDefinition],
+                    structTypes: Seq[StructTypeDefinition],
+                    traitTypes: Seq[HierarchicalTypeDefinition[TraitType]],
+                    classTypes: Seq[HierarchicalTypeDefinition[ClassType]]) {
+    def this() = this(Seq(), Seq(), Seq(), Seq())
+    def this(enumType : EnumTypeDefinition) = this(Seq(enumType), Seq(), Seq(), Seq())
+    def this(structType: StructTypeDefinition) = this(Seq(), Seq(structType), Seq(), Seq())
+    def this(typ: HierarchicalTypeDefinition[_], isTrait : Boolean) = this(
+      Seq(),
+      Seq(),
+      if ( isTrait )
+        Seq(typ.asInstanceOf[HierarchicalTypeDefinition[TraitType]]) else Seq(),
+      if (!isTrait )
+        Seq(typ.asInstanceOf[HierarchicalTypeDefinition[ClassType]]) else Seq()
+    )
+
+    def enumTypesAsJavaList() = {
+        import scala.collection.JavaConverters._
+        enumTypes.asJava
+    }
+
+    def structTypesAsJavaList() = {
+        import scala.collection.JavaConverters._
+        structTypes.asJava
+    }
+
+    def traitTypesAsJavaList() = {
+        import scala.collection.JavaConverters._
+        traitTypes.asJava
+    }
+
+    def classTypesAsJavaList() = {
+        import scala.collection.JavaConverters._
+        classTypes.asJava
+    }
+
+    def isEmpty() = {
+      enumTypes.isEmpty & structTypes.isEmpty & traitTypes.isEmpty & classTypes.isEmpty
+    }
+}
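
[Editor's sketch] TypesDef simply bundles the four kinds of definitions so they can travel together (and be handed to Java callers via the asJavaList accessors). A hedged example using the TypesUtil helpers; all type and attribute names are illustrative:

  import com.google.common.collect.ImmutableList
  import org.apache.atlas.typesystem.TypesDef
  import org.apache.atlas.typesystem.types._
  import org.apache.atlas.typesystem.types.utils.TypesUtil._

  // order of the Seqs follows the case class: enums, structs, traits, classes
  val typesDef = TypesDef(
    Seq(createEnumTypeDef("HiveObjectType", new EnumValue("TABLE", 3))),
    Seq(createStructTypeDef("Address", createRequiredAttrDef("street", DataTypes.STRING_TYPE))),
    Seq(createTraitTypeDef("Dimension", ImmutableList.of[String]())),
    Seq(createClassTypeDef("Table", ImmutableList.of[String](),
      createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE))))

  assert(!typesDef.isEmpty())
  val javaClassDefs = typesDef.classTypesAsJavaList()  // java.util.List for Java callers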

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
new file mode 100644
index 0000000..df1851c
--- /dev/null
+++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/InstanceBuilder.scala
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.typesystem.builders
+
+import org.apache.atlas.typesystem.{IReferenceableInstance, IStruct, Referenceable, Struct}
+
+import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
+import scala.collection.mutable.ArrayBuffer
+import scala.language.{dynamics, implicitConversions}
+import scala.util.DynamicVariable
+
+class InstanceBuilder extends Dynamic {
+
+  private val references : ArrayBuffer[Referenceable] = new ArrayBuffer[Referenceable]()
+
+  val context = new DynamicVariable[DynamicStruct](null)
+
+  def struct(typeName : String) : DynamicStruct = {
+    context.value = new DynamicStruct(this, new Struct(typeName))
+    context.value
+  }
+
+  def instance(typeName: String, traitNames: String*)(f : => Unit) : DynamicReference = {
+    val r = new Referenceable(typeName, traitNames:_*)
+    references.append(r)
+    val dr = new DynamicReference(this, r)
+    context.withValue(dr){f}
+    dr
+  }
+
+  def create( f : => Unit ) : java.util.List[Referenceable] = {
+    f
+    references.asJava
+  }
+
+  def applyDynamic(name : String)(value : Any) : Any = {
+    context.value.updateDynamic(name)(value)
+  }
+
+  implicit def symbolToDynamicStruct(s : Symbol) : DynamicValue =
+    new DynamicValue(this, s.name, if (context.value == null) null else context.value.s)
+
+}
+
+object DynamicValue {
+
+  private[builders] def transformOut(s: IStruct, attr : String, v : Any)(implicit ib : InstanceBuilder) : DynamicValue =
+    v match {
+    case r : Referenceable => new DynamicReference(ib, r)
+    case s : Struct => new DynamicStruct(ib, s)
+    case jL : java.util.List[_] => {
+      if ( s != null ) {
+        new DynamicCollection(ib, attr, s)
+      } else {
+        new DynamicValue(ib, attr, s, jL.map{ e => transformOut(null, null, e)}.toSeq)
+      }
+    }
+    case jM : java.util.Map[_,_] => {
+      if ( s != null ) {
+        new DynamicMap(ib, attr, s)
+      } else {
+        new DynamicValue(ib, attr, s, jM.map {
+          case (k, v) => k -> transformOut(null, null, v)
+        }.toMap)
+      }
+    }
+    case x => {
+      if ( s != null ) {
+        new DynamicValue(ib, attr, s)
+      } else {
+        new DynamicValue(ib, attr, s, x)
+      }
+    }
+  }
+
+  private[builders] def transformIn(v : Any) : Any = v match {
+    case dr : DynamicReference => dr.r
+    case ds : DynamicStruct => ds.s
+    case dv : DynamicValue => dv.get
+    case l : Seq[_] => l.map{ e => transformIn(e)}.asJava
+    case m : Map[_,_] => m.map {
+      case (k,v) => k -> transformIn(v)
+    }.asJava
+    case x => x
+  }
+
+}
+
+class DynamicValue(val ib : InstanceBuilder, val attrName : String, val s: IStruct, var value : Any = null) extends Dynamic {
+  import DynamicValue._
+
+  implicit val iib : InstanceBuilder = ib
+
+  def ~(v : Any): Unit = {
+    if ( s != null ) {
+      s.set(attrName, transformIn(v))
+    } else {
+      value = v
+    }
+  }
+
+  def get : Any = if ( s != null ) s.get(attrName) else value
+
+  def selectDynamic(name: String) : DynamicValue = {
+
+    throw new UnsupportedOperationException()
+  }
+
+  def update(key : Any, value : Object): Unit = {
+    throw new UnsupportedOperationException()
+  }
+
+  def apply(key : Any): DynamicValue = {
+
+    if ( s != null && s.isInstanceOf[Referenceable] && key.isInstanceOf[String]) {
+      val r = s.asInstanceOf[Referenceable]
+      if ( r.getTraits contains attrName ) {
+        val traitAttr = key.asInstanceOf[String]
+        return new DynamicStruct(ib, r.getTrait(attrName)).selectDynamic(traitAttr)
+      }
+    }
+    throw new UnsupportedOperationException()
+  }
+}
+
+class DynamicCollection(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
+  import DynamicValue._
+
+  override def update(key : Any, value : Object): Unit = {
+    var jL = s.get(attrName)
+    val idx = key.asInstanceOf[Int]
+    if (jL == null ) {
+      val l = new java.util.ArrayList[Object]()
+      l.ensureCapacity(idx)
+      jL = l
+    }
+    val nJL = new java.util.ArrayList[Object](jL.asInstanceOf[java.util.List[Object]])
+    nJL.asInstanceOf[java.util.List[Object]].set(idx, transformIn(value).asInstanceOf[Object])
+    s.set(attrName, nJL)
+  }
+
+  override def apply(key : Any): DynamicValue = {
+    var jL = s.get(attrName)
+    val idx = key.asInstanceOf[Int]
+    if (jL == null ) {
+      null
+    } else {
+      transformOut(null, null, jL.asInstanceOf[java.util.List[Object]].get(idx))
+    }
+  }
+}
+
+class DynamicMap(ib : InstanceBuilder, attrName : String, s: IStruct) extends DynamicValue(ib, attrName ,s) {
+  import DynamicValue._
+  override def update(key : Any, value : Object): Unit = {
+    var jM = s.get(attrName)
+    if (jM == null ) {
+      jM = new java.util.HashMap[Object, Object]()
+    }
+    jM.asInstanceOf[java.util.Map[Object, Object]].put(key.asInstanceOf[AnyRef], value)
+  }
+
+  override def apply(key : Any): DynamicValue = {
+    var jM = s.get(attrName)
+    if (jM == null ) {
+      null
+    } else {
+      transformOut(null, null, jM.asInstanceOf[java.util.Map[Object, Object]].get(key))
+    }
+  }
+}
+
+class DynamicStruct(ib : InstanceBuilder, s: IStruct) extends DynamicValue(ib, null ,s) {
+  import DynamicValue._
+  override def selectDynamic(name: String) : DynamicValue = {
+    transformOut(s, name, s.get(name))
+  }
+
+  def updateDynamic(name: String)(value: Any) {
+    s.set(name, transformIn(value))
+  }
+
+  override def ~(v : Any): Unit = { throw new UnsupportedOperationException()}
+  override def get : Any = s
+
+}
+
+class DynamicReference(ib : InstanceBuilder, val r : IReferenceableInstance) extends DynamicStruct(ib, r) {
+
+  private def _trait(name : String) = new DynamicStruct(ib, r.getTrait(name))
+
+  override def selectDynamic(name: String) : DynamicValue = {
+    if ( r.getTraits contains name ) {
+      _trait(name)
+    } else {
+      super.selectDynamic(name)
+    }
+  }
+
+}
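
[Editor's sketch] The Dynamic machinery above enables a small DSL for building Referenceable instances. A hedged usage sketch inferred from the builder code (the "DB"/"Table" types and their attributes are hypothetical and would have to be defined in the type system first):

  import org.apache.atlas.typesystem.Referenceable
  import org.apache.atlas.typesystem.builders.InstanceBuilder

  val b = new InstanceBuilder
  import b._

  val instances: java.util.List[Referenceable] = create {
    val salesDb = instance("DB") {   // opens a Referenceable context
      'name ~ "Sales"                // Symbol ~ value sets an attribute
      'owner ~ "John"
    }
    instance("Table") {
      'name ~ "sales_fact"
      'db ~ salesDb                  // a reference to another built instance
    }
  }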

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
----------------------------------------------------------------------
diff --git a/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
new file mode 100644
index 0000000..cd711d6
--- /dev/null
+++ b/typesystem/src/main/scala/org/apache/atlas/typesystem/builders/TypesBuilder.scala
@@ -0,0 +1,188 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.typesystem.builders
+
+import com.google.common.collect.ImmutableList
+import org.apache.atlas.typesystem.TypesDef
+import org.apache.atlas.typesystem.types._
+import org.apache.atlas.typesystem.types.utils.TypesUtil
+
+import scala.collection.mutable.ArrayBuffer
+import scala.language.{dynamics, implicitConversions, postfixOps}
+import scala.util.DynamicVariable
+
+object TypesBuilder {
+
+  case class Context(enums : ArrayBuffer[EnumTypeDefinition],
+                      structs : ArrayBuffer[StructTypeDefinition],
+                      classes : ArrayBuffer[HierarchicalTypeDefinition[ClassType]],
+                      traits : ArrayBuffer[HierarchicalTypeDefinition[TraitType]],
+                      currentTypeAttrs : ArrayBuffer[Attr] = null)
+
+  class AttrOption()
+  class ReverseAttributeName(val rNm : String) extends AttrOption
+  class MultiplicityOption(val lower: Int, val upper: Int, val isUnique: Boolean) extends AttrOption
+
+  val required = new AttrOption()
+  val optional = new AttrOption()
+  val collection = new AttrOption()
+  val set = new AttrOption()
+  val composite = new AttrOption()
+  val unique = new AttrOption()
+  val indexed = new AttrOption()
+  def reverseAttributeName(rNm : String) = new ReverseAttributeName(rNm)
+  def multiplicty(lower: Int, upper: Int, isUnique: Boolean) = new MultiplicityOption(lower, upper, isUnique)
+
+  val boolean = DataTypes.BOOLEAN_TYPE.getName
+  val byte = DataTypes.BYTE_TYPE.getName
+  val short = DataTypes.SHORT_TYPE.getName
+  val int = DataTypes.INT_TYPE.getName
+  val long = DataTypes.LONG_TYPE.getName
+  val float = DataTypes.FLOAT_TYPE.getName
+
+  val double = DataTypes.DOUBLE_TYPE.getName
+  val bigint = DataTypes.BIGINTEGER_TYPE.getName
+  val bigdecimal = DataTypes.BIGDECIMAL_TYPE.getName
+  val date = DataTypes.DATE_TYPE.getName
+  val string = DataTypes.STRING_TYPE.getName
+
+  def array(t : String) : String = {
+    DataTypes.arrayTypeName(t)
+  }
+
+  def map(kt : String, vt : String) : String = {
+    DataTypes.mapTypeName(kt, vt)
+  }
+
+  class Attr(ctx : Context, val name : String) {
+
+    private var dataTypeName : String = DataTypes.BOOLEAN_TYPE.getName
+    private var multiplicity: Multiplicity = Multiplicity.OPTIONAL
+    private var isComposite: Boolean = false
+    private var reverseAttributeName: String = null
+    private var isUnique: Boolean = false
+    private var isIndexable: Boolean = false
+
+    ctx.currentTypeAttrs += this
+
+    def getDef : AttributeDefinition =
+      new AttributeDefinition(name, dataTypeName,
+        multiplicity, isComposite, isUnique, isIndexable, reverseAttributeName)
+
+    def `~`(dT : String, options : AttrOption*) : Attr = {
+      dataTypeName = dT
+      options.foreach { o =>
+        o match {
+          case `required` => {multiplicity = Multiplicity.REQUIRED}
+          case `optional` => {multiplicity = Multiplicity.OPTIONAL}
+          case `collection` => {multiplicity = Multiplicity.COLLECTION}
+          case `set` => {multiplicity = Multiplicity.SET}
+          case `composite` => {isComposite = true}
+          case `unique` => {isUnique = true}
+          case `indexed` => {isIndexable = true}
+          case m : MultiplicityOption => {multiplicity = new Multiplicity(m.lower, m.upper, m.isUnique)}
+          case r : ReverseAttributeName => {reverseAttributeName = r.rNm}
+          case _ => ()
+        }
+      }
+      this
+    }
+
+  }
+
+}
+
+class TypesBuilder {
+
+  import org.apache.atlas.typesystem.builders.TypesBuilder.{Attr, Context}
+
+  val required = TypesBuilder.required
+  val optional = TypesBuilder.optional
+  val collection = TypesBuilder.collection
+  val set = TypesBuilder.set
+  val composite = TypesBuilder.composite
+  val unique = TypesBuilder.unique
+  val indexed = TypesBuilder.indexed
+  def multiplicty = TypesBuilder.multiplicty _
+  def reverseAttributeName = TypesBuilder.reverseAttributeName _
+
+  val boolean = TypesBuilder.boolean
+  val byte = TypesBuilder.byte
+  val short = TypesBuilder.short
+  val int = TypesBuilder.int
+  val long = TypesBuilder.long
+  val float = TypesBuilder.float
+
+  val double = TypesBuilder.double
+  val bigint = TypesBuilder.bigint
+  val bigdecimal = TypesBuilder.bigdecimal
+  val date = TypesBuilder.date
+  val string = TypesBuilder.string
+
+  def array = TypesBuilder.array _
+
+  def map = TypesBuilder.map _
+
+  val context = new DynamicVariable[Context](Context(new ArrayBuffer(),
+    new ArrayBuffer(),
+    new ArrayBuffer(),
+    new ArrayBuffer()))
+
+  implicit def strToAttr(s : String) = new Attr(context.value, s)
+
+  def types(f : => Unit ) : TypesDef = {
+    f
+    TypesDef(context.value.enums.toSeq,
+      context.value.structs.toSeq,
+      context.value.traits.toSeq,
+      context.value.classes.toSeq)
+  }
+
+  def _class(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
+    val attrs = new ArrayBuffer[Attr]()
+    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
+    context.value.classes +=
+      TypesUtil.createClassTypeDef(name, ImmutableList.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
+  }
+
+  def _trait(name : String, superTypes : List[String] = List())(f : => Unit): Unit = {
+    val attrs = new ArrayBuffer[Attr]()
+    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
+    context.value.traits +=
+      TypesUtil.createTraitTypeDef(name, ImmutableList.copyOf[String](superTypes.toArray), attrs.map(_.getDef):_*)
+    val v = context.value
+    v.traits.size
+  }
+
+  def struct(name : String)(f : => Unit): Unit = {
+    val attrs = new ArrayBuffer[Attr]()
+    context.withValue(context.value.copy(currentTypeAttrs = attrs)){f}
+    context.value.structs +=
+      new StructTypeDefinition(name, attrs.map(_.getDef).toArray)
+  }
+
+  def enum(name : String, values : String*) : Unit = {
+    val enums = values.zipWithIndex.map{ case (v, i) =>
+        new EnumValue(v,i)
+    }
+    context.value.enums +=
+      TypesUtil.createEnumTypeDef(name, enums:_*)
+  }
+
+}
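
[Editor's sketch] TypesBuilder gives the same kind of DSL for type definitions: types { ... } collects _class, _trait, struct and enum declarations into a TypesDef, with "attrName" ~ (dataType, options...) supplying multiplicity and flags. A sketch using only calls visible in this file (the model names are illustrative):

  import org.apache.atlas.typesystem.TypesDef
  import org.apache.atlas.typesystem.builders.TypesBuilder

  val tb = new TypesBuilder
  import tb._

  val typesDef: TypesDef = types {
    _trait("Dimension") {}                       // a trait with no attributes
    _class("DB") {
      "name" ~ (string, required, indexed, unique)
      "owner" ~ (string)
    }
    _class("Table", List("DataSet")) {           // super types via the second argument
      "db" ~ ("DB", required)
      "columns" ~ (array(string), collection)
    }
    struct("Address") {
      "street" ~ (string, required)
      "zip" ~ (int)
    }
    enum("TableType", "MANAGED", "EXTERNAL")     // values get ordinals 0, 1
  }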

