atlas-commits mailing list archives

From venkat...@apache.org
Subject [06/50] [abbrv] incubator-atlas git commit: removed unused modules
Date Fri, 12 Jun 2015 20:38:26 GMT
removed unused modules


Project: http://git-wip-us.apache.org/repos/asf/incubator-atlas/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-atlas/commit/22624786
Tree: http://git-wip-us.apache.org/repos/asf/incubator-atlas/tree/22624786
Diff: http://git-wip-us.apache.org/repos/asf/incubator-atlas/diff/22624786

Branch: refs/heads/apache-local
Commit: 22624786ee4fe86c94d94fee4bcf4c0855919901
Parents: fdd841d
Author: Shwetha GS <sshivalingamurthy@hortonworks.com>
Authored: Fri Jun 12 11:25:41 2015 +0530
Committer: Shwetha GS <sshivalingamurthy@hortonworks.com>
Committed: Fri Jun 12 11:25:41 2015 +0530

----------------------------------------------------------------------
 .../metadata-bridge-hive/pom.xml                |  112 --
 .../bridge/hivelineage/hook/HiveLineage.java    |  398 ------
 .../hivelineage/hook/HiveLineageInfo.java       |  535 --------
 .../metadata/bridge/hivelineage/hook/Hook.java  |  317 -----
 .../hivelineage/hook/HiveLineageInfoTest.java   |   34 -
 pom.xml                                         |    1 -
 tools/pom.xml                                   |  187 ---
 tools/src/main/resources/application.conf       |   54 -
 .../org/apache/atlas/tools/cli/Console.scala    |   67 -
 .../atlas/tools/dsl/DynamicTypedStruct.scala    |   38 -
 .../org/apache/atlas/tools/dsl/package.scala    |   99 --
 .../apache/atlas/tools/simpleserver/Main.scala  |   59 -
 .../tools/simpleserver/MetadataActor.scala      |  121 --
 .../tools/simpleserver/RestInterface.scala      |  126 --
 .../atlas/tools/thrift/ThriftParser.scala       |  664 ----------
 .../atlas/tools/thrift/ThriftTypesGen.scala     |  316 -----
 tools/src/test/resources/test.thrift            | 1146 ------------------
 .../org/apache/atlas/tools/dsl/DSLTest.scala    |  241 ----
 .../tools/hive/HiveMockMetadataService.scala    |   84 --
 .../atlas/tools/thrift/ThriftLexerTest.scala    |  525 --------
 .../atlas/tools/thrift/ThriftParserTest.scala   |  772 ------------
 21 files changed, 5896 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/addons/metadata-bridge-parent/metadata-bridge-hive/pom.xml
----------------------------------------------------------------------
diff --git a/addons/metadata-bridge-parent/metadata-bridge-hive/pom.xml b/addons/metadata-bridge-parent/metadata-bridge-hive/pom.xml
deleted file mode 100755
index 36dcee9..0000000
--- a/addons/metadata-bridge-parent/metadata-bridge-hive/pom.xml
+++ /dev/null
@@ -1,112 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xmlns="http://maven.apache.org/POM/4.0.0"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.atlas</groupId>
-        <artifactId>atlas-bridge-parent</artifactId>
-        <version>0.1-incubating-SNAPSHOT</version>
-    </parent>
-    <artifactId>atlas-bridge-hive</artifactId>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-            <version>2.2.2</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-common</artifactId>
-            <version>0.13.1</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <version>2.4.0</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hive</groupId>
-            <artifactId>hive-exec</artifactId>
-            <version>0.13.1</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-        </dependency>
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.1</version>
-                <configuration>
-                    <source>1.6</source>
-                    <target>1.6</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <version>2.4</version>
-            </plugin>
-
-            <!--
-                 <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-dependency-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>copy</id>
-                        <phase>prepare-package</phase>
-                        <goals>
-                            <goal>copy</goal>
-                        </goals>
-                        <configuration>
-                            <artifactItems>
-                                <artifactItem>
-                                    <groupId>org.apache.atlas</groupId>
-                                    <artifactId>atlas-common</artifactId>
-                                    <version>0.1-incubating-SNAPSHOT</version>
-                                    <outputDirectory>${project.build.directory}</outputDirectory>
-                                </artifactItem>
-                            </artifactItems>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-                  -->
-
-
-        </plugins>
-    </build>
-
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineage.java
----------------------------------------------------------------------
diff --git a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineage.java b/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineage.java
deleted file mode 100755
index 7e180d2..0000000
--- a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineage.java
+++ /dev/null
@@ -1,398 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.bridge.hivelineage.hook;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-
-
-public class HiveLineage implements Serializable {
-
-    /** Serialization version for this bean. */
-    private static final long serialVersionUID = 1L;
-    public String queryId;
-    public String hiveId;
-    public String user;
-    public String queryStartTime;
-    public String queryEndTime;
-    public String query;
-    public String tableName;
-    public String databaseName;
-    public String action;
-    public String tableLocation;
-    public boolean success;
-    public boolean failed;
-    public String executionEngine;
-    ArrayList<SourceTables> sourceTables;
-    ArrayList<QueryColumns> queryColumns;
-    ArrayList<WhereClause> whereClause;
-    ArrayList<CreateColumns> createColumns;
-    ArrayList<GroupBy> groupBy;
-    ArrayList<GroupBy> orderBy;
-
-
-    public String getQueryId() {
-        return this.queryId;
-    }
-
-    public void setQueryId(String queryId) {
-        this.queryId = queryId;
-    }
-
-    public String getExecutionEngine() {
-        return this.executionEngine;
-    }
-
-    public void setExecutionEngine(String executionEngine) {
-        this.executionEngine = executionEngine;
-    }
-
-    public String getHiveId() {
-        return this.hiveId;
-    }
-
-    public void setHiveId(String hiveId) {
-        this.hiveId = hiveId;
-    }
-
-    public boolean getSuccess() {
-        return this.success;
-    }
-
-    public void setSuccess(boolean success) {
-        this.success = success;
-    }
-
-    public boolean getFailed() {
-        return this.failed;
-    }
-
-    public void setFailed(boolean failed) {
-        this.failed = failed;
-    }
-
-
-    public String getTableName() {
-        return this.tableName;
-    }
-
-    public void setTableName(String tableName) {
-        this.tableName = tableName;
-    }
-
-
-    public String getAction() {
-        return this.action;
-    }
-
-    public void setAction(String action) {
-        this.action = action;
-    }
-
-    public String getDatabaseName() {
-        return this.databaseName;
-    }
-
-    public void setDatabaseName(String databaseName) {
-        this.databaseName = databaseName;
-    }
-
-    public String getTableLocation() {
-        return this.tableLocation;
-    }
-
-    public void setTableLocation(String tableLocation) {
-        this.tableLocation = tableLocation;
-    }
-
-    public String getUser() {
-        return this.user;
-    }
-
-    public void setUser(String user) {
-        this.user = user;
-    }
-
-    public String getQueryStartTime() {
-        return this.queryStartTime;
-    }
-
-    public void setQueryStartTime(String queryStartTime) {
-        this.queryStartTime = queryStartTime;
-    }
-
-    public String getQueryEndTime() {
-        return this.queryEndTime;
-    }
-
-    public void setQueryEndTime(String queryEndTime) {
-        this.queryEndTime = queryEndTime;
-    }
-
-    public String getQuery() {
-        return this.query;
-    }
-
-    public void setQuery(String query) {
-        this.query = query;
-    }
-
-    public ArrayList<SourceTables> getSourceTables() {
-        return this.sourceTables;
-    }
-
-    public void setSourceTables(ArrayList<SourceTables> sourceTables) {
-        this.sourceTables = sourceTables;
-    }
-
-    public ArrayList<QueryColumns> getQueryColumns() {
-        return this.queryColumns;
-    }
-
-    public void setQueryColumns(ArrayList<QueryColumns> queryColumns) {
-        this.queryColumns = queryColumns;
-    }
-
-
-    public ArrayList<WhereClause> getWhereClause() {
-        return this.whereClause;
-    }
-
-    public void setWhereClause(ArrayList<WhereClause> whereClause) {
-        this.whereClause = whereClause;
-    }
-
-
-    public ArrayList<GroupBy> getGroupBy() {
-        return this.groupBy;
-    }
-
-    public void setGroupBy(ArrayList<GroupBy> groupBy) {
-        this.groupBy = groupBy;
-    }
-
-    public ArrayList<CreateColumns> getCreateColumns() {
-        return this.createColumns;
-    }
-
-    public void setCreateColumns(ArrayList<CreateColumns> createColumns) {
-        this.createColumns = createColumns;
-    }
-
-    public class SourceTables {
-        public String tableName;
-        public String tableAlias;
-        public String databaseName;
-
-        public String getTableName() {
-            return this.tableName;
-        }
-
-        public void setTableName(String tableName) {
-            this.tableName = tableName;
-        }
-
-        public String getTableAlias() {
-            return this.tableAlias;
-        }
-
-        public void setTableAlias(String tableAlias) {
-            this.tableAlias = tableAlias;
-        }
-
-
-        public String getDatabaseName() {
-            return this.databaseName;
-        }
-
-        public void setDatabaseName(String databaseName) {
-            this.databaseName = databaseName;
-        }
-    }
-
-    public class QueryColumns {
-        public String tbAliasOrName;
-        public String columnName;
-        public String columnAlias;
-        public String columnFunction;
-        public String columnDistinctFunction;
-
-        public String getTbAliasOrName() {
-            return this.tbAliasOrName;
-        }
-
-        public void setTbAliasOrName(String tbAliasOrName) {
-            this.tbAliasOrName = tbAliasOrName;
-        }
-
-        public String getColumnName() {
-            return this.columnName;
-        }
-
-        public void setColumnName(String columnName) {
-            this.columnName = columnName;
-        }
-
-        public String getColumnAlias() {
-            return this.columnAlias;
-        }
-
-        public void setColumnAlias(String columnAlias) {
-            this.columnAlias = columnAlias;
-        }
-
-
-        public String getColumnFunction() {
-            return this.columnFunction;
-        }
-
-        public void setColumnFunction(String columnFunction) {
-            this.columnFunction = columnFunction;
-        }
-
-        public String getColumnDistinctFunction() {
-            return this.columnDistinctFunction;
-        }
-
-        public void setColumnDistinctFunction(String columnDistinctFunction) {
-            this.columnDistinctFunction = columnDistinctFunction;
-        }
-    }
-
-    public class GroupBy {
-        public String tbAliasOrName;
-        public String columnName;
-
-        public String getTbAliasOrName() {
-            return this.tbAliasOrName;
-        }
-
-        public void setTbAliasOrName(String tbAliasOrName) {
-            this.tbAliasOrName = tbAliasOrName;
-        }
-
-        public String getColumnName() {
-            return this.columnName;
-        }
-
-        public void setColumnName(String columnName) {
-            this.columnName = columnName;
-        }
-    }
-
-    public class WhereClause {
-        public String tbAliasOrName;
-        public String columnCondition;
-        public String columnName;
-        public String columnOperator;
-        public String columnValue;
-        public ArrayList<ColumnValueIn> columnValueIn;
-
-
-        public String getColumnCondition() {
-            return this.columnCondition;
-        }
-
-        public void setColumnCondition(String columnCondition) {
-            this.columnCondition = columnCondition;
-        }
-
-        public String getTbAliasOrName() {
-            return this.tbAliasOrName;
-        }
-
-        public void setTbAliasOrName(String tbAliasOrName) {
-            this.tbAliasOrName = tbAliasOrName;
-        }
-
-        public String getColumnName() {
-            return this.columnName;
-        }
-
-        public void setColumnName(String columnName) {
-            this.columnName = columnName;
-        }
-
-        public String getColumnOperator() {
-            return this.columnOperator;
-        }
-
-        public void setColumnOperator(String columnOperator) {
-            this.columnOperator = columnOperator;
-        }
-
-
-        public String getColumnValue() {
-            return this.columnValue;
-        }
-
-        public void setColumnValue(String columnValue) {
-            this.columnValue = columnValue;
-        }
-
-
-        public ArrayList<ColumnValueIn> getColumnValueIn() {
-            return this.columnValueIn;
-        }
-
-        public void setColumnValueIn(ArrayList<ColumnValueIn> columnValueIn) {
-            this.columnValueIn = columnValueIn;
-        }
-
-
-    }
-
-    public class CreateColumns {
-        public String columnName;
-        public String columnType;
-
-        public String getColumnName() {
-            return this.columnName;
-        }
-
-        public void setColumnName(String columnName) {
-            this.columnName = columnName;
-        }
-
-        public String getColumnType() {
-            return this.columnType;
-        }
-
-        public void setColumnType(String columnType) {
-            this.columnType = columnType;
-        }
-    }
-
-    public class ColumnValueIn {
-        public String columnValueIn;
-
-        public String getColumnValueIn() {
-            return this.columnValueIn;
-        }
-
-        public void setColumnValueIn(String columnValueIn) {
-            this.columnValueIn = columnValueIn;
-        }
-
-
-    }
-
-}
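
For reference, the removed HiveLineage bean was serialized verbatim by Gson; an illustrative, abbreviated JSON shape for a simple SELECT (the values here are hypothetical; only the field names come from the class above, and Gson omits unset object fields by default):

    {"queryId":"...","user":"jdoe","queryStartTime":"...","queryEndTime":"...",
     "query":"SELECT t.a FROM db1.t","databaseName":"db1","action":"select",
     "success":true,"failed":false,"executionEngine":"mapreduce",
     "sourceTables":[{"tableName":"t","databaseName":"db1"}],
     "queryColumns":[{"tbAliasOrName":"t","columnName":"a"}]}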

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfo.java
----------------------------------------------------------------------
diff --git a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfo.java b/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfo.java
deleted file mode 100755
index 379f82b..0000000
--- a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfo.java
+++ /dev/null
@@ -1,535 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.bridge.hivelineage.hook;
-
-import com.google.gson.Gson;
-import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.CreateColumns;
-import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.GroupBy;
-import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.QueryColumns;
-import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.SourceTables;
-import org.apache.atlas.bridge.hivelineage.hook.HiveLineage.WhereClause;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
-import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
-import org.apache.hadoop.hive.ql.lib.Dispatcher;
-import org.apache.hadoop.hive.ql.lib.GraphWalker;
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.lib.NodeProcessor;
-import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
-import org.apache.hadoop.hive.ql.lib.Rule;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-import org.apache.hadoop.hive.ql.parse.ParseDriver;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.log4j.ConsoleAppender;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
-import org.apache.log4j.Logger;
-import org.apache.log4j.PatternLayout;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Stack;
-
-/**
- * This class extracts lineage info from a SQL query. Currently it captures only the
- * input and output tables for a given query; later we can expand to add join tables etc.
- */
-public class HiveLineageInfo implements NodeProcessor {
-
-    private final Log LOG = LogFactory.getLog(HiveLineageInfo.class.getName());
-    public Map<Integer, String> queryMap;
-    public Integer counter = 0;
-    public HiveLineage hlb = new HiveLineage();
-    public ArrayList<SourceTables> sourceTables;
-    public ArrayList<QueryColumns> queryColumns;
-    public ArrayList<GroupBy> groupBy;
-    public ArrayList<WhereClause> whereClause;
-    public ArrayList<CreateColumns> createColumns;
-
-    // Main method: runs the parser on a query and prints the resulting Gson/JSON feed.
-    public static void main(String[] args) throws IOException, ParseException,
-            SemanticException {
-
-        String query = args[0];
-        ConsoleAppender console = new ConsoleAppender(); //create appender
-        //configure the appender
-        String PATTERN = "%d [%p|%c|%C{1}] %m%n";
-        console.setLayout(new PatternLayout(PATTERN));
-        console.setThreshold(Level.DEBUG);
-        console.activateOptions();
-        //add appender to any Logger (here is root)
-        Logger.getRootLogger().addAppender(console);
-        LogManager.getRootLogger().setLevel(Level.DEBUG);
-
-        HiveLineageInfo lep = new HiveLineageInfo();
-        lep.getLineageInfo(query);
-        Gson gson = new Gson();
-        String jsonOut = gson.toJson(lep.getHLBean());
-        System.out.println("GSON/JSON Generate :: " + jsonOut);
-
-    }
-
-    /**
-     * @return the custom HiveLineage bean data to be passed to the Gson parser
-     */
-    public HiveLineage getHLBean() {
-        return hlb;
-    }
-
-    /**
-     * Implements the process method for the NodeProcessor interface.
-     */
-    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
-                          Object... nodeOutputs) throws SemanticException {
-        ASTNode pt = (ASTNode) nd;
-
-        /*
-         * Check the 1st-level children and do simple semantic checks: 1) CTLT and
-         * CTAS should not coexist. 2) CTLT or CTAS should not coexist with a column
-         * list (target table schema). 3) CTAS does not support partitioning (for
-         * now).
-         */
-
-        switch (pt.getToken().getType()) {
-
-            case HiveParser.TOK_FROM:
-                LOG.debug("From Table Dump: " + pt.dump());
-                fromTableDump(pt);
-                break;
-
-            case HiveParser.TOK_SELECT:
-                LOG.debug("Column Dump: " + pt.dump());
-                columnTableDump(pt);
-                break;
-
-            case HiveParser.TOK_SELECTDI:
-                LOG.debug("Column Dump: " + pt.dump());
-                columnTableDump(pt);
-                break;
-
-            case HiveParser.TOK_CREATETABLE:
-                createColumns = new ArrayList<CreateColumns>();
-                LOG.debug("CREATETABLE DUMP: " + pt.dump());
-                createTableDump(pt);
-                break;
-
-            case HiveParser.TOK_CREATEVIEW:
-                createColumns = new ArrayList<CreateColumns>();
-                LOG.debug("CREATEVIEW DUMP: " + pt.dump());
-                createTableDump(pt);
-                break;
-        /*
-         * Processing of WHERE and GROUP BY is currently disabled; it adds no value right now.
-         *
-
-    	case HiveParser.TOK_WHERE:
-    		whereClause = new ArrayList<WhereClause>();
-            LOG.debug("WHERE CLAUSE DUMP: "+pt.dump());
-            whereDump(pt);
-            this.hlb.setWhereClause(whereClause);
-    	break;
-
-    	case HiveParser.TOK_GROUPBY:
-    		groupBy = new ArrayList<GroupBy>();
-            LOG.debug("GROUPBY CLAUSE DUMP: "+pt.dump());
-    		groupByDump(pt);
-    		this.hlb.setGroupBy(groupBy);
-    	break;
-
-        */
-        }
-        return null;
-    }
-
-    /**
-     *  Walks the whereTree called by processWalker
-     */
-    public void whereDump(ASTNode nodeIn) {
-        counter = 0;
-        wdump(nodeIn);
-    }
-
-    /**
-     *  Walks the Where Tree called by whereDump
-     */
-    private void wdump(ASTNode nodeIn) {
-        boolean parseChild = true;
-        if (nodeIn.getType() == HiveParser.TOK_TABLE_OR_COL) {
-            WhereClause whreClse = hlb.new WhereClause();
-            if (nodeIn.getParent().getText().equalsIgnoreCase(".")) {
-                ASTNode checkOrAnd = (ASTNode) nodeIn.getParent().getParent().getChild(1)
-                        .getParent().getParent();
-                if (checkOrAnd.getType() == HiveParser.KW_AND ||
-                        checkOrAnd.getType() == HiveParser.KW_OR) {
-                    LOG.debug("WHERE:: " + checkOrAnd.getText());
-                    whreClse.setColumnOperator(checkOrAnd.getText());
-                }
-                LOG.debug("Table Alias:: " + nodeIn.getChild(0).getText());
-                whreClse.setTbAliasOrName(nodeIn.getChild(0).getText());
-                LOG.debug("Delimiter:: " + nodeIn.getParent().getText());
-                LOG.debug("Column:: " + nodeIn.getParent().getChild(1).getText());
-                whreClse.setColumnName(nodeIn.getParent().getChild(1).getText());
-                LOG.debug("Column Qualifier:: " +
-                        nodeIn.getParent().getParent().getChild(1).getParent().getText());
-                whreClse.setColumnOperator(
-                        nodeIn.getParent().getParent().getChild(1).getParent().getText());
-                LOG.debug("Column Value:: " + nodeIn.getParent().getParent().getChild(1).getText());
-                whreClse.setColumnValue(nodeIn.getParent().getParent().getChild(1).getText());
-            } else {
-                ASTNode checkOrAnd = (ASTNode) nodeIn.getParent().getParent().getChild(1)
-                        .getParent();
-                if (checkOrAnd.getType() == HiveParser.KW_AND ||
-                        checkOrAnd.getType() == HiveParser.KW_OR) {
-                    LOG.debug("WHERE:: " + checkOrAnd.getText());
-                    whreClse.setColumnOperator(checkOrAnd.getText());
-                }
-                LOG.debug("Column:: = " + nodeIn.getChild(0).getText());
-                whreClse.setColumnName(nodeIn.getChild(0).getText());
-                //LOG.info("Delimiter "+nodeIn.getParent().getText());
-                LOG.debug("Column Qualifier:: " +
-                        nodeIn.getParent().getChild(1).getParent().getText());
-                whreClse.setColumnOperator(nodeIn.getParent().getChild(1).getParent().getText());
-                LOG.debug("Column Value:: " + nodeIn.getParent().getChild(1).getText());
-                whreClse.setColumnValue(nodeIn.getParent().getChild(1).getText());
-            }
-            whereClause.add(whreClse);
-        }
-        if (parseChild) {
-            int childCount = nodeIn.getChildCount();
-            if (childCount != 0) {
-                for (int numr = 0; numr < childCount; numr++) {
-                    wdump((ASTNode) nodeIn.getChild(numr));
-                }
-            }
-        }
-    }
-
-    /**
-     *  Walks the GroupByTree called by processWalker
-     */
-    public void groupByDump(ASTNode nodeIn) {
-        counter = 0;
-        gdump(nodeIn);
-    }
-
-    /**
-     *  Walks the GroupBy Tree called by groupByDump
-     */
-    private void gdump(ASTNode nodeIn) {
-        boolean parseChild = true;
-        if (nodeIn.getType() == HiveParser.TOK_TABLE_OR_COL) {
-            GroupBy grpBy = hlb.new GroupBy();
-            ASTNode parentNode = (ASTNode) nodeIn.getParent();
-            if (parentNode.getText().equalsIgnoreCase(".")) {
-                LOG.debug("GroupBy TableAlias: " + nodeIn.getChild(0).getText());
-                grpBy.setTbAliasOrName(nodeIn.getChild(0).getText());
-                LOG.debug("GroupBy Column:: " + parentNode.getChild(1).getText());
-                grpBy.setColumnName(parentNode.getChild(1).getText());
-            } else {
-                LOG.debug("GroupBy Column: " + nodeIn.getChild(0).getText());
-                grpBy.setColumnName(nodeIn.getChild(0).getText());
-            }
-            groupBy.add(grpBy);
-        }
-        if (parseChild) {
-            int childCount = nodeIn.getChildCount();
-            if (childCount != 0) {
-                for (int numr = 0; numr < childCount; numr++) {
-                    gdump((ASTNode) nodeIn.getChild(numr));
-                }
-            }
-        }
-    }
-
-    /**
-     *  Walks the CreateTable Tree called by processWalker
-     */
-
-    public void createTableDump(ASTNode nodeIn) {
-        counter = 0;
-        if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME) != null &&
-                nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
-            LOG.info("Create TableName:: " +
-                    nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getText());
-            if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChildCount() == 2) {
-                LOG.debug("To DataBaseName:: " +
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
-                hlb.setDatabaseName(
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
-                LOG.debug("To TableName:: " +
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(1).getText());
-                hlb.setTableName(
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(1).getText());
-            } else {
-                LOG.debug("To TableName:: " +
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
-                hlb.setTableName(
-                        nodeIn.getFirstChildWithType(HiveParser.TOK_TABNAME).getChild(0).getText());
-            }
-        }
-        if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION) != null &&
-                nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
-            LOG.debug("Create Table Location:: " +
-                    nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION).getText());
-            hlb.setTableLocation(
-                    nodeIn.getFirstChildWithType(HiveParser.TOK_TABLELOCATION).getChild(0)
-                            .getText());
-        }
-        if (nodeIn.getFirstChildWithType(HiveParser.TOK_TABCOLLIST) != null &&
-                nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
-            ctdump((ASTNode) nodeIn.getFirstChildWithType(HiveParser.TOK_TABCOLLIST).getParent());
-            hlb.setCreateColumns(createColumns);
-        }
-    }
-
-    /**
-     *  Walks the CreateTable Tree called by createTableDump
-     */
-    private void ctdump(ASTNode nodeIn) {
-        boolean parseChild = true;
-        if (nodeIn.getType() == HiveParser.TOK_TABCOL) {
-            CreateColumns crtClmns = hlb.new CreateColumns();
-            LOG.debug("Create Column Name:: " + nodeIn.getChild(0).getText());
-            crtClmns.setColumnName(nodeIn.getChild(0).getText());
-            LOG.debug("Create Column Type:: " + nodeIn.getChild(1).getText());
-            crtClmns.setColumnType(nodeIn.getChild(1).getText());
-            createColumns.add(crtClmns);
-        }
-        if (parseChild) {
-            int childCount = nodeIn.getChildCount();
-            if (childCount != 0) {
-                for (int numr = 0; numr < childCount; numr++) {
-                    ctdump((ASTNode) nodeIn.getChild(numr));
-                }
-            }
-        }
-    }
-
-    /**
-     *  Walks the fromTable Tree called by processWalker
-     */
-
-    public void fromTableDump(ASTNode nodeIn) {
-        counter = 0;
-        ftdump(nodeIn);
-    }
-
-    /**
-     *  Walks the fromTable Tree called by fromTableDump
-     */
-    private void ftdump(ASTNode nodeIn) {
-        boolean parseChild = true;
-        if (nodeIn.getType() == HiveParser.TOK_TABNAME &&
-                nodeIn.getParent().getType() == HiveParser.TOK_TABREF &&
-                nodeIn.getAncestor(HiveParser.TOK_WHERE) == null) {
-            SourceTables hlbSbls = hlb.new SourceTables();
-            if (nodeIn.getChildCount() == 2) {
-                LOG.debug("From DBName:: " + nodeIn.getChild(0).getText());
-                hlbSbls.setDatabaseName(nodeIn.getChild(0).getText());
-                LOG.debug("From TableName:: " + nodeIn.getChild(1).getText());
-                hlbSbls.setTableName(nodeIn.getChild(1).getText());
-            } else {
-                LOG.debug("From TableName:: " + nodeIn.getChild(0).getText());
-                hlbSbls.setTableName(nodeIn.getChild(0).getText());
-
-            }
-            if (nodeIn.getType() == HiveParser.TOK_TABNAME &&
-                    nodeIn.getParent().getChild(1) != null) {
-                LOG.debug("From DB/Table Alias:: " + nodeIn.getParent().getChild(1).getText());
-                hlbSbls.setTableAlias(nodeIn.getParent().getChild(1).getText());
-            }
-            sourceTables.add(hlbSbls);
-        }
-        if (parseChild) {
-            int childCount = nodeIn.getChildCount();
-            if (childCount != 0) {
-                for (int numr = 0; numr < childCount; numr++) {
-                    ftdump((ASTNode) nodeIn.getChild(numr));
-                }
-            }
-        }
-    }
-
-    /**
-     *  Walks the column Tree called by processWalker
-     */
-
-    public void columnTableDump(ASTNode nodeIn) {
-        counter = 0;
-        clmnTdump(nodeIn);
-    }
-
-    /**
-     *  Walks the columnDump Tree called by columnTableDump
-     */
-    private void clmnTdump(ASTNode nodeIn) {
-        boolean parseChild = true;
-        if (nodeIn.getType() == HiveParser.TOK_TABLE_OR_COL &&
-                nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null &&
-                !(nodeIn.hasAncestor(HiveParser.TOK_WHERE))) {
-            QueryColumns qclmns = hlb.new QueryColumns();
-            if (nodeIn.getAncestor(HiveParser.TOK_FUNCTION) != null &&
-                    nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null) {
-                LOG.debug("Function Query:: " +
-                        nodeIn.getAncestor(HiveParser.TOK_FUNCTION).getChild(0).getText());
-                qclmns.setColumnFunction(
-                        nodeIn.getAncestor(HiveParser.TOK_FUNCTION).getChild(0).getText());
-            }
-            if (nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI) != null &&
-                    nodeIn.getAncestor(HiveParser.TOK_SELEXPR) != null) {
-                LOG.debug("Function Distinct Query:: " +
-                        nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI).getChild(0).getText());
-                qclmns.setColumnDistinctFunction(
-                        nodeIn.getAncestor(HiveParser.TOK_FUNCTIONDI).getChild(0).getText());
-            }
-            if (nodeIn.getParent().getText().equalsIgnoreCase(".")) {
-                LOG.debug("Table Name/Alias:: " + nodeIn.getChild(0).getText());
-                qclmns.setTbAliasOrName(nodeIn.getChild(0).getText());
-                LOG.debug("Column:: " + nodeIn.getParent().getChild(1).getText());
-                qclmns.setColumnName(nodeIn.getParent().getChild(1).getText());
-                if (nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1) != null) {
-                    LOG.debug("Column Alias:: " +
-                            nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1).getText());
-                    qclmns.setColumnAlias(
-                            nodeIn.getAncestor(HiveParser.TOK_SELEXPR).getChild(1).getText());
-                }
-            } else {
-                LOG.debug("Column:: " + nodeIn.getChild(0).getText());
-                qclmns.setColumnName(nodeIn.getChild(0).getText());
-                if ((nodeIn.getParent().getChild(1) != null &&
-                        nodeIn.getParent().getChild(1).getType() != HiveParser.TOK_TABLE_OR_COL)) {
-                    LOG.debug("Column Alias:: " + nodeIn.getParent().getChild(1).getText());
-                    qclmns.setColumnAlias(nodeIn.getParent().getChild(1).getText());
-                }
-            }
-            if (qclmns.getColumnName() != null) {
-                queryColumns.add(qclmns);
-            }
-        }
-        if (parseChild) {
-            int childCount = nodeIn.getChildCount();
-            if (childCount != 0) {
-                for (int numr = 0; numr < childCount; numr++) {
-                    clmnTdump((ASTNode) nodeIn.getChild(numr));
-                }
-            }
-        }
-    }
-
-    /**
-     * parses given query and gets the lineage info.
-     *
-     * @param query
-     * @throws ParseException
-     */
-    public void getLineageInfo(String query) throws ParseException,
-            SemanticException {
-
-        /*
-         * Get the AST.
-         */
-        ParseDriver pd = new ParseDriver();
-        ASTNode tree = pd.parse(query);
-        LOG.info("DUMP TREE: " + tree.dump());
-        if (tree.getChild(0).getType() == HiveParser.TOK_DROPDATABASE) {
-            hlb.setAction("drop_database");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_CREATEDATABASE) {
-            hlb.setAction("create_database");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_CREATETABLE) {
-            hlb.setAction("create_table");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_CREATEVIEW) {
-            hlb.setAction("create_view");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_DROPTABLE) {
-            hlb.setAction("drop_table");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_INSERT) {
-            hlb.setAction("insert");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_INSERT_INTO) {
-            hlb.setAction("insert_into");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_DROPVIEW) {
-            hlb.setAction("drop_view");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_SHOWDATABASES) {
-            hlb.setAction("show_databases");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_SHOWTABLES) {
-            hlb.setAction("show_tables");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_ALTERVIEW_RENAME) {
-            hlb.setAction("alter_view_rename");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_ALTERTABLE_RENAME) {
-            hlb.setAction("alter_table_rename");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_ANALYZE) {
-            hlb.setAction("analyze");
-        }
-        if (tree.getChild(0).getType() == HiveParser.TOK_QUERY) {
-            hlb.setAction("select");
-        }
-
-        while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
-            tree = (ASTNode) tree.getChild(0);
-        }
-        sourceTables = new ArrayList<SourceTables>();
-        queryColumns = new ArrayList<QueryColumns>();
-
-
-        /*
-         * Initialize the event processor and dispatcher.
-         */
-
-        // Create a walker that walks the tree in a DFS manner while maintaining
-        // the node stack; the dispatcher fires this processor on each node.
-        Map<Rule, NodeProcessor> rules = new LinkedHashMap<Rule, NodeProcessor>();
-        // The dispatcher fires the processor corresponding to the closest matching
-        // rule and passes the context along
-        Dispatcher disp = new DefaultRuleDispatcher(this, rules, null);
-        GraphWalker ogw = new DefaultGraphWalker(disp);
-        // Create a list of top-level nodes to start the walk from
-        ArrayList<Node> topNodes = new ArrayList<Node>();
-        topNodes.add(tree);
-        ogw.startWalking(topNodes, null);
-        if (!(sourceTables.isEmpty())) {
-            this.hlb.setSourceTables(sourceTables);
-        }
-
-        if (!(queryColumns.isEmpty())) {
-            this.hlb.setQueryColumns(queryColumns);
-        }
-    }
-}
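
For reference, the removed parser could be driven standalone exactly as its deleted main() did; a minimal Java sketch of that flow (the query string is illustrative, and the classes are the ones deleted above):

    import com.google.gson.Gson;
    import org.apache.atlas.bridge.hivelineage.hook.HiveLineage;
    import org.apache.atlas.bridge.hivelineage.hook.HiveLineageInfo;

    public class LineageDemo {
        public static void main(String[] args) throws Exception {
            // Walk the query's AST and collect lineage into the HiveLineage bean.
            HiveLineageInfo info = new HiveLineageInfo();
            info.getLineageInfo("SELECT t.a FROM db1.t WHERE t.a = 1");
            // Serialize the populated bean to JSON, as the hook does before posting.
            System.out.println(new Gson().toJson(info.getHLBean()));
        }
    }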

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/Hook.java
----------------------------------------------------------------------
diff --git a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/Hook.java b/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/Hook.java
deleted file mode 100755
index 8ce316d..0000000
--- a/addons/metadata-bridge-parent/metadata-bridge-hive/src/main/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/Hook.java
+++ /dev/null
@@ -1,317 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.bridge.hivelineage.hook;
-
-
-import com.google.gson.Gson;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.exec.ExplainTask;
-import org.apache.hadoop.hive.ql.exec.Utilities;
-import org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext;
-import org.apache.hadoop.hive.ql.hooks.HookContext;
-import org.apache.hadoop.hive.ql.hooks.ReadEntity;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.util.StringUtils;
-
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.X509TrustManager;
-import java.io.DataOutputStream;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLEncoder;
-import java.util.Set;
-
-/**
- * DGC Hook sends query + plan info to the DGCCollector service. To enable it (Hadoop 2.4 and up), set
- * hive.exec.pre.hooks/hive.exec.post.hooks/hive.exec.failure.hooks to include this class.
- */
-public class Hook implements ExecuteWithHookContext {
-
-    private static final Log LOG = LogFactory.getLog(Hook.class.getName());
-    private static final String METADATA_HOST = "localhost";
-    private static final int METADATA_PORT = 20810;
-    private static final String METADATA_PATH = "/entities/submit/HiveLineage";
-    private HiveLineage hlb;
-
-    @Override
-    public void run(HookContext hookContext) throws Exception {
-        long currentTime = System.currentTimeMillis();
-        String executionEngine = null;
-        try {
-            QueryPlan plan = hookContext.getQueryPlan();
-            if (plan == null) {
-                return;
-            }
-            ExplainTask explain = new ExplainTask();
-            explain.initialize(hookContext.getConf(), plan, null);
-            String queryId = plan.getQueryId();
-            String queryStartTime = plan.getQueryStartTime().toString();
-            String user = hookContext.getUgi().getUserName();
-            String query = plan.getQueryStr();
-            int numMrJobs = Utilities.getMRTasks(plan.getRootTasks()).size();
-            int numTezJobs = Utilities.getTezTasks(plan.getRootTasks()).size();
-            String hiveId = explain.getId();
-            SessionState sess = SessionState.get();
-
-            if (numTezJobs > 0) {
-                executionEngine = "tez";
-            }
-            if (numMrJobs > 0) {
-                executionEngine = "mr";
-            }
-            hiveId = sess.getSessionId();
-            String defaultdb = null;
-
-
-            switch (hookContext.getHookType()) {
-                case PRE_EXEC_HOOK:
-                    Set<ReadEntity> db_pre = hookContext.getInputs();
-                    for (Object o : db_pre) {
-                        LOG.debug("DB:Table=" + o.toString());
-                        defaultdb = o.toString().split("@")[0];
-                    }
-
-                    currentTime = System.currentTimeMillis();
-                    HiveLineageInfo lep_pre = new HiveLineageInfo();
-                    lep_pre.getLineageInfo(query);
-                    hlb = lep_pre.getHLBean();
-                    hlb.setDatabaseName(defaultdb);
-                    hlb.setQueryEndTime(Long.toString(currentTime));
-                    hlb.setQueryId(queryId);
-                    hlb.setQuery(query);
-                    hlb.setUser(user);
-                    hlb.setHiveId(hiveId);
-                    hlb.setSuccess(false);
-                    if (executionEngine != null) {
-                        if (executionEngine.equalsIgnoreCase("mr")) {
-                            hlb.setExecutionEngine("mapreduce");
-                        }
-                        if (executionEngine.equalsIgnoreCase("tez")) {
-                            hlb.setExecutionEngine("tez");
-                        }
-                        if (executionEngine.equalsIgnoreCase("spark")) {
-                            hlb.setExecutionEngine("spark");
-                        }
-                    } else {
-                        hlb.setExecutionEngine("local");
-                    }
-                    hlb.setQueryStartTime(queryStartTime);
-                    fireAndForget(hookContext.getConf(), hlb, queryId);
-
-                    break;
-                case POST_EXEC_HOOK:
-                    Set<ReadEntity> db_post = hookContext.getInputs();
-                    for (Object o : db_post) {
-                        LOG.debug("DB:Table=" + o.toString());
-                        defaultdb = o.toString().split("@")[0];
-                    }
-                    currentTime = System.currentTimeMillis();
-                    HiveLineageInfo lep_post = new HiveLineageInfo();
-                    lep_post.getLineageInfo(query);
-                    hlb = lep_post.getHLBean();
-                    hlb.setDatabaseName(defaultdb);
-                    hlb.setQueryEndTime(Long.toString(currentTime));
-                    hlb.setQueryId(queryId);
-                    hlb.setQuery(query);
-                    hlb.setUser(user);
-                    hlb.setQueryStartTime(queryStartTime);
-                    hlb.setSuccess(true);
-                    hlb.setHiveId(hiveId);
-                    if (executionEngine != null) {
-                        if (executionEngine.equalsIgnoreCase("mr")) {
-                            hlb.setExecutionEngine("mapreduce");
-                        }
-                        if (executionEngine.equalsIgnoreCase("tez")) {
-                            hlb.setExecutionEngine("tez");
-                        }
-                        if (executionEngine.equalsIgnoreCase("spark")) {
-                            hlb.setExecutionEngine("spark");
-                        }
-                    } else {
-                        hlb.setExecutionEngine("local");
-                    }
-                    fireAndForget(hookContext.getConf(), hlb, queryId);
-
-                    break;
-                case ON_FAILURE_HOOK:
-                    Set<ReadEntity> db_fail = hookContext.getInputs();
-                    for (Object o : db_fail) {
-                        LOG.debug("DB:Table=" + o.toString());
-                        defaultdb = o.toString().split("@")[0];
-                    }
-                    HiveLineageInfo lep_failed = new HiveLineageInfo();
-                    lep_failed.getLineageInfo(query);
-                    hlb = lep_failed.getHLBean();
-                    hlb.setDatabaseName(defaultdb);
-                    hlb.setQueryEndTime(Long.toString(currentTime));
-                    hlb.setQueryId(queryId);
-                    hlb.setQuery(query);
-                    hlb.setUser(user);
-                    hlb.setQueryStartTime(queryStartTime);
-                    hlb.setSuccess(false);
-                    hlb.setFailed(true);
-                    hlb.setHiveId(hiveId);
-                    if (executionEngine != null) {
-                        if (executionEngine.equalsIgnoreCase("mr")) {
-                            hlb.setExecutionEngine("mapreduce");
-                        }
-                        if (executionEngine.equalsIgnoreCase("tez")) {
-                            hlb.setExecutionEngine("tez");
-                        }
-                        if (executionEngine.equalsIgnoreCase("spark")) {
-                            hlb.setExecutionEngine("spark");
-                        }
-                    } else {
-                        hlb.setExecutionEngine("local");
-                    }
-                    fireAndForget(hookContext.getConf(), hlb, queryId);
-                    break;
-                default:
-                    //ignore
-                    break;
-            }
-        } catch (Exception e) {
-            LOG.info("Failed to submit plan to DGC: " + StringUtils.stringifyException(e));
-        }
-    }
-
-    public void fireAndForget(Configuration conf, HiveLineage hookData, String queryId)
-            throws Exception {
-        String postUri = String
-                .format("http://%s:%s%s", METADATA_HOST, METADATA_PORT, METADATA_PATH);
-        if (conf.getTrimmed("atlas.hive.hook.uri") != null) {
-            postUri = conf.getTrimmed("atlas.hive.hook.uri");
-        }
-        Gson gson = new Gson();
-        String gsonString = gson.toJson(hookData);
-        LOG.debug("GSON String: " + gsonString);
-        String encodedGsonQuery = URLEncoder.encode(gsonString, "UTF-8");
-        String encodedQueryId = URLEncoder.encode(queryId, "UTF-8");
-        String postData = "hookdata=" + encodedGsonQuery + "&queryid=" + encodedQueryId;
-        // Create a trust manager that does not validate certificate chains
-        if (postUri.contains("https:")) {
-            TrustManager[] trustAllCerts = new TrustManager[]{
-                    new X509TrustManager() {
-                        public java.security.cert.X509Certificate[] getAcceptedIssuers() {
-                            return null;
-                        }
-
-                        public void checkClientTrusted(
-                                java.security.cert.X509Certificate[] certs, String authType) {
-                        }
-
-                        public void checkServerTrusted(
-                                java.security.cert.X509Certificate[] certs, String authType) {
-                        }
-                    }
-            };
-            // Install the all-trusting trust manager
-            try {
-                SSLContext sc = SSLContext.getInstance("SSL");
-                sc.init(null, trustAllCerts, new java.security.SecureRandom());
-                HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        URL url = new URL(postUri);
-        LOG.debug("Post URI: " + postUri);
-        DataOutputStream wr = null;
-        //HttpURLConnection urlcon = null;
-        if (postUri.contains("https:")) {
-            HttpsURLConnection urlcon = null;
-            urlcon = (HttpsURLConnection) url.openConnection();
-            urlcon.setRequestMethod("POST");
-            urlcon.setRequestProperty("X-Requested-By", "HiveHook");
-            urlcon.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
-            urlcon.setUseCaches(false);
-            urlcon.setDoInput(true);
-            urlcon.setDoOutput(true);
-            wr = new DataOutputStream(urlcon.getOutputStream());
-            LOG.debug("PostString: " + postData);
-            wr.write(postData.getBytes());
-
-            wr.flush();
-            wr.close();
-
-
-            InputStream is = urlcon.getInputStream();
-            InputStreamReader isr = new InputStreamReader(is);
-
-            int numCharsRead;
-            char[] charArray = new char[1024];
-            StringBuffer sb = new StringBuffer();
-            while ((numCharsRead = isr.read(charArray)) > 0) {
-                sb.append(charArray, 0, numCharsRead);
-            }
-            String result = sb.toString();
-
-            LOG.debug("Post Response: " + result);
-            isr.close();
-            is.close();
-            urlcon.disconnect();
-        } else {
-            HttpURLConnection urlcon = null;
-            urlcon = (HttpURLConnection) url.openConnection();
-            urlcon.setRequestMethod("POST");
-            urlcon.setRequestProperty("X-Requested-By", "HiveHook");
-            urlcon.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
-            urlcon.setUseCaches(false);
-            urlcon.setDoInput(true);
-            urlcon.setDoOutput(true);
-            wr = new DataOutputStream(urlcon.getOutputStream());
-            LOG.debug("PostString: " + postData);
-            wr.write(postData.getBytes());
-
-            wr.flush();
-            wr.close();
-
-
-            InputStream is = urlcon.getInputStream();
-            InputStreamReader isr = new InputStreamReader(is);
-
-            int numCharsRead;
-            char[] charArray = new char[1024];
-            StringBuffer sb = new StringBuffer();
-            while ((numCharsRead = isr.read(charArray)) > 0) {
-                sb.append(charArray, 0, numCharsRead);
-            }
-            String result = sb.toString();
-
-            LOG.debug("Post Response: " + result);
-            isr.close();
-            is.close();
-            urlcon.disconnect();
-        }
-
-
-    }
-
-}
-
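
As the class comment notes, the hook was enabled through the standard Hive hook properties; a hedged hive-site.xml sketch of that wiring (the endpoint value mirrors the METADATA_* defaults above, and atlas.hive.hook.uri is the override key read in fireAndForget()):

    <property>
        <name>hive.exec.pre.hooks</name>
        <value>org.apache.atlas.bridge.hivelineage.hook.Hook</value>
    </property>
    <property>
        <name>hive.exec.post.hooks</name>
        <value>org.apache.atlas.bridge.hivelineage.hook.Hook</value>
    </property>
    <property>
        <name>hive.exec.failure.hooks</name>
        <value>org.apache.atlas.bridge.hivelineage.hook.Hook</value>
    </property>
    <property>
        <name>atlas.hive.hook.uri</name>
        <value>http://localhost:20810/entities/submit/HiveLineage</value>
    </property>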

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/addons/metadata-bridge-parent/metadata-bridge-hive/src/test/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfoTest.java
----------------------------------------------------------------------
diff --git a/addons/metadata-bridge-parent/metadata-bridge-hive/src/test/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfoTest.java b/addons/metadata-bridge-parent/metadata-bridge-hive/src/test/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfoTest.java
deleted file mode 100755
index 3038c99..0000000
--- a/addons/metadata-bridge-parent/metadata-bridge-hive/src/test/java/org/apache/hadoop/metadata/bridge/hivelineage/hook/HiveLineageInfoTest.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.bridge.hivelineage.hook;
-
-import com.google.gson.Gson;
-import org.apache.hadoop.hive.ql.parse.ParseException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-
-public class HiveLineageInfoTest {
-    public static String parseQuery(String query) throws SemanticException,
-    ParseException {
-        HiveLineageInfo lep = new HiveLineageInfo();
-        lep.getLineageInfo(query);
-        Gson gson = new Gson();
-        String jsonOut = gson.toJson(lep.getHLBean());
-        return jsonOut;
-    }
-}

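As committed, the class above carries no @Test method; parseQuery is a static helper that runs HiveLineageInfo over a query string and serializes the resulting bean with Gson, so the test run had nothing to execute here. A minimal smoke-test sketch, assuming TestNG on the classpath (the query string is arbitrary):

    import org.testng.Assert
    import org.testng.annotations.Test

    class HiveLineageInfoSmokeTest {
        // Drives the static helper above; any parsable HiveQL statement will do.
        @Test
        def parseSimpleSelect(): Unit = {
            val json = HiveLineageInfoTest.parseQuery("SELECT id, name FROM employees")
            Assert.assertNotNull(json)
        }
    }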
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 30fab3d..76df53c 100755
--- a/pom.xml
+++ b/pom.xml
@@ -134,7 +134,6 @@
         <module>repository</module>
         <module>webapp</module>
         <module>docs</module>
-        <module>tools</module>
         <module>addons/hive-bridge</module>
     </modules>
 

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/pom.xml
----------------------------------------------------------------------
diff --git a/tools/pom.xml b/tools/pom.xml
deleted file mode 100755
index 90fd396..0000000
--- a/tools/pom.xml
+++ /dev/null
@@ -1,187 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~     http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xmlns="http://maven.apache.org/POM/4.0.0"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>org.apache.atlas</groupId>
-        <artifactId>apache-atlas</artifactId>
-        <version>0.1-incubating-SNAPSHOT</version>
-    </parent>
-    <artifactId>atlas-tools</artifactId>
-    <description>Apache Atlas Type System Tools Module</description>
-    <name>Apache Atlas Type System Tools</name>
-    <packaging>jar</packaging>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.atlas</groupId>
-            <artifactId>atlas-typesystem</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.atlas</groupId>
-            <artifactId>atlas-repository</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-compiler</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-reflect</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>jline</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-library</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scala-actors</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scala-lang</groupId>
-            <artifactId>scalap</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalatest</groupId>
-            <artifactId>scalatest_${scala.binary.version}</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>org.scalamacros</groupId>
-            <artifactId>quasiquotes_${scala.binary.version}</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.json4s</groupId>
-            <artifactId>json4s-native_2.10</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.github.nscala-time</groupId>
-            <artifactId>nscala-time_2.10</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.typesafe</groupId>
-            <artifactId>config</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-actor_2.10</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-testkit_2.10</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.typesafe.akka</groupId>
-            <artifactId>akka-slf4j_2.10</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>io.spray</groupId>
-            <artifactId>spray-routing</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.spray</groupId>
-            <artifactId>spray-can</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.spray</groupId>
-            <artifactId>spray-httpx</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>io.spray</groupId>
-            <artifactId>spray-testkit</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.code.gson</groupId>
-            <artifactId>gson</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>it.unimi.dsi</groupId>
-            <artifactId>fastutil</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>com.google.inject</groupId>
-            <artifactId>guice</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.tinkerpop.blueprints</groupId>
-            <artifactId>blueprints-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.thinkaurelius.titan</groupId>
-            <artifactId>titan-core</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.thinkaurelius.titan</groupId>
-            <artifactId>titan-es</artifactId>
-        </dependency>
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/resources/application.conf
----------------------------------------------------------------------
diff --git a/tools/src/main/resources/application.conf b/tools/src/main/resources/application.conf
deleted file mode 100755
index 4ae7028..0000000
--- a/tools/src/main/resources/application.conf
+++ /dev/null
@@ -1,54 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-akka {
-  loglevel = DEBUG
-  stdout-loglevel = DEBUG
-  event-handlers = ["akka.event.Logging$DefaultLogger"]
-  default-dispatcher {
-    fork-join-executor {
-      parallelism-min = 8
-    }
-  }
-  test {
-    timefactor = 1
-  }
-}
-
-spray {
-  can {
-    server {
-      server-header = "Metadata Service"
-    }
-  }
-}
-
-http {
-  host = "0.0.0.0"
-  host = ${?HOST}
-  port = 9140
-  port = ${?PORT}
-}
-
-graphRepo {
-  storage.backend = inmemory
-
-  # Graph Search Index
-  #index.search.backend=elasticsearch
-  #index.search.directory=target/data/es
-  #index.search.elasticsearch.client-only=false
-  #index.search.elasticsearch.local-mode=true
-}

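The removed application.conf leans on HOCON's optional-substitution syntax: host = ${?HOST} reassigns http.host only when a HOST environment variable (or config path) is defined; otherwise the literal on the preceding line stands. A minimal sketch of how the tools code reads these values (Scala):

    import com.typesafe.config.ConfigFactory

    // ${?HOST} / ${?PORT} only take effect when the corresponding environment
    // variables are set; the literals above them act as defaults.
    val config = ConfigFactory.load()
    val host = config.getString("http.host") // "0.0.0.0" unless HOST is set
    val port = config.getInt("http.port")    // 9140 unless PORT is set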
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/scala/org/apache/atlas/tools/cli/Console.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/cli/Console.scala b/tools/src/main/scala/org/apache/atlas/tools/cli/Console.scala
deleted file mode 100755
index df10239..0000000
--- a/tools/src/main/scala/org/apache/atlas/tools/cli/Console.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.tools.cli
-
-import org.apache.atlas.repository.memory.MemRepository
-import org.apache.atlas.typesystem.types.TypeSystem
-
-import scala.tools.nsc.Settings
-import scala.tools.nsc.interpreter.{ILoop, IMain}
-
-object Console extends App {
-    val settings = new Settings
-    settings.usejavacp.value = true
-    settings.deprecation.value = true
-    settings.bootclasspath.value += """/Users/hbutani/.m2/repository/org/apache/atlas/1.0-SNAPSHOT/atlas-1.0-SNAPSHOT.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-compiler/2.10.4/scala-compiler-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-reflect/2.10.4/scala-reflect-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/jline/2.10.4/jline-2.10.4.jar:/Users/hbutani/.m2/repository/org/fusesource/jansi/jansi/1.4/jansi-1.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-library/2.10.4/scala-library-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scala-actors/2.10.4/scala-actors-2.10.4.jar:/Users/hbutani/.m2/repository/org/scala-lang/scalap/2.10.4/scalap-2.10.4.jar:/Users/hbutani/.m2/repository/org/scalatest/scalatest_2.10/2.2.0/scalatest_2.10-2.2.0.jar:/Users/hbutani/.m2/repository/org/scalamacros/quasiquotes_2.10/2.0.1/quasiquotes_2.10-2.0.1.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-native_2.10/3.2.11/json4s-native_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-core_2.10/3.2.11/json4s-core_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/org/json4s/json4s-ast_2.10/3.2.11/json4s-ast_2.10-3.2.11.jar:/Users/hbutani/.m2/repository/com/thoughtworks/paranamer/paranamer/2.6/paranamer-2.6.jar:/Users/hbutani/.m2/repository/com/github/nscala-time/nscala-time_2.10/1.6.0/nscala-time_2.10-1.6.0.jar:/Users/hbutani/.m2/repository/joda-time/joda-time/2.5/joda-time-2.5.jar:/Users/hbutani/.m2/repository/org/joda/joda-convert/1.2/joda-convert-1.2.jar:/Users/hbutani/.m2/repository/com/typesafe/config/1.2.1/config-1.2.1.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-actor_2.10/2.3.7/akka-actor_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-testkit_2.10/2.3.7/akka-testkit_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/com/typesafe/akka/akka-slf4j_2.10/2.3.7/akka-slf4j_2.10-2.3.7.jar:/Users/hbutani/.m2/repository/org/slf4j/slf4j-api/1.7.5/slf4j-api-1.7.5.jar:/Users/hbutani/.m2/repository/io/spray/spray-routing/1.3.1/spray-routing-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-http/1.3.1/spray-http-1.3.1.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-scala_2.10/1.1.6/parboiled-scala_2.10-1.1.6.jar:/Users/hbutani/.m2/repository/org/parboiled/parboiled-core/1.1.6/parboiled-core-1.1.6.jar:/Users/hbutani/.m2/repository/io/spray/spray-util/1.3.1/spray-util-1.3.1.jar:/Users/hbutani/.m2/repository/com/chuusai/shapeless_2.10/1.2.4/shapeless_2.10-1.2.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-can/1.3.1/spray-can-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-io/1.3.1/spray-io-1.3.1.jar:/Users/hbutani/.m2/repository/io/spray/spray-httpx/1.3.1/spray-httpx-1.3.1.jar:/Users/hbutani/.m2/repository/org/jvnet/mimepull/mimepull/1.9.4/mimepull-1.9.4.jar:/Users/hbutani/.m2/repository/io/spray/spray-testkit/1.3.1/spray-testkit-1.3.1.jar:/Users/hbutani/.m2/repository/com/google/guava/guava/11.0.2/guava-11.0.2.jar:/Users/hbutani/.m2/repository/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/Users/hbutani/.m2/repository/junit/junit/4.10/junit-4.10.jar:/Users/hbutani/.m2/repository/org/hamcrest/hamcrest-core/1.1/hamcrest-core-1.1.jar"""
-
-    val in = new IMain(settings) {
-        override protected def parentClassLoader = settings.getClass.getClassLoader()
-    }
-
-    new SampleILoop().process(settings)
-}
-
-class SampleILoop extends ILoop {
-    val ts: TypeSystem = TypeSystem.getInstance()
-
-    //intp = Console.in
-    val mr: MemRepository = new MemRepository(ts)
-
-    override def prompt = "==> "
-
-    addThunk {
-        intp.beQuietDuring {
-            intp.addImports("java.lang.Math._")
-            intp.addImports("org.json4s.native.Serialization.{read, write => swrite}")
-            intp.addImports("org.json4s._")
-            intp.addImports("org.json4s.native.JsonMethods._")
-            intp.addImports("org.apache.atlas.tools.dsl._")
-            //intp.bindValue("service", ms)
-            //intp.bindValue("cp", intp.compilerClasspath)
-        }
-    }
-
-    override def printWelcome() {
-        echo("\n" +
-            "         \\,,,/\n" +
-            "         (o o)\n" +
-            "-----oOOo-(_)-oOOo-----")
-    }
-
-}

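The removed REPL hard-wires one developer's local ~/.m2 artifact paths into settings.bootclasspath, so it could not have started on any other machine. With usejavacp already enabled, the launching JVM's classpath can be reused instead of an absolute list; a minimal sketch of the portable setup (same 2.10-era interpreter API):

    import scala.tools.nsc.Settings

    val settings = new Settings
    settings.usejavacp.value = true   // reuse the launching JVM's classpath
    settings.deprecation.value = true
    // If extra entries are ever required, derive them from the running process
    // rather than hard-coding absolute repository paths:
    settings.classpath.value = System.getProperty("java.class.path")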
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/scala/org/apache/atlas/tools/dsl/DynamicTypedStruct.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/dsl/DynamicTypedStruct.scala b/tools/src/main/scala/org/apache/atlas/tools/dsl/DynamicTypedStruct.scala
deleted file mode 100755
index 7bc52e3..0000000
--- a/tools/src/main/scala/org/apache/atlas/tools/dsl/DynamicTypedStruct.scala
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.tools.dsl
-
-import org.apache.atlas.typesystem.ITypedStruct
-import org.apache.atlas.typesystem.types.{StructType, TypeSystem}
-
-import scala.language.dynamics
-
-class DynamicTypedStruct(val ts: ITypedStruct) extends Dynamic {
-    def selectDynamic(name: String) = ts.get(name)
-
-    def updateDynamic(name: String)(value: Any) {
-        var value1 = value
-        if (value != null && value.isInstanceOf[DynamicTypedStruct]) {
-            value1 = value.asInstanceOf[DynamicTypedStruct].ts
-        }
-        ts.set(name, value1)
-    }
-
-    def dataType = TypeSystem.getInstance().getDataType(classOf[StructType], ts.getTypeName)
-}

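Because DynamicTypedStruct mixes in scala.Dynamic, field-style access compiles into the two methods above: reads go through selectDynamic, assignments through updateDynamic, and nested DynamicTypedStruct values are unwrapped on the way in. A minimal usage sketch, assuming a "Person" struct type with a name attribute has been registered (createInstance comes from the dsl package object below):

    import org.apache.atlas.tools.dsl._

    // person.name = "Jane Doe" compiles to person.updateDynamic("name")("Jane Doe");
    // person.name compiles to person.selectDynamic("name").
    val person: DynamicTypedStruct = createInstance("Person")
    person.name = "Jane Doe"
    println(person.name)     // Jane Doe
    println(person.dataType) // the StructType registered as "Person"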
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/scala/org/apache/atlas/tools/dsl/package.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/dsl/package.scala b/tools/src/main/scala/org/apache/atlas/tools/dsl/package.scala
deleted file mode 100755
index 1d7fd43..0000000
--- a/tools/src/main/scala/org/apache/atlas/tools/dsl/package.scala
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.tools
-
-import org.apache.atlas.repository.memory.MemRepository
-import org.apache.atlas.typesystem.json.{BigDecimalSerializer, BigIntegerSerializer, Serialization, TypedStructSerializer}
-import org.apache.atlas.typesystem.persistence.StructInstance
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.{IStruct, ITypedStruct}
-import org.json4s._
-import org.json4s.native.JsonMethods._
-import org.json4s.native.Serialization.{write => swrite}
-
-import scala.collection.JavaConversions._
-import scala.language.implicitConversions
-
-package object dsl {
-
-    val defFormat = new DefaultFormats {
-        override protected def dateFormatter = TypeSystem.getInstance().getDateFormat;
-
-        override val typeHints = NoTypeHints
-    }
-
-    implicit val formats = defFormat + new TypedStructSerializer +
-        new BigDecimalSerializer + new BigIntegerSerializer
-    val BOOLEAN_TYPE = DataTypes.BOOLEAN_TYPE
-    val BYTE_TYPE = DataTypes.BYTE_TYPE
-    val SHORT_TYPE = DataTypes.SHORT_TYPE
-    val INT_TYPE = DataTypes.INT_TYPE
-    val LONG_TYPE = DataTypes.LONG_TYPE
-    val FLOAT_TYPE = DataTypes.FLOAT_TYPE
-    val DOUBLE_TYPE = DataTypes.DOUBLE_TYPE
-    val BIGINT_TYPE = DataTypes.BIGINTEGER_TYPE
-    val BIGDECIMAL_TYPE = DataTypes.BIGDECIMAL_TYPE
-    val DATE_TYPE = DataTypes.DATE_TYPE
-    val STRING_TYPE = DataTypes.STRING_TYPE
-    val ATTR_OPTIONAL = Multiplicity.OPTIONAL
-    val ATTR_REQUIRED = Multiplicity.REQUIRED
-
-    def repo = new MemRepository(ts)
-
-    def arrayType(dT: IDataType[_]) = ts.defineArrayType(dT)
-
-    def mapType(kT: IDataType[_], vT: IDataType[_]) = ts.defineMapType(kT, vT)
-
-    def attrDef(name: String, dT: IDataType[_],
-                m: Multiplicity = Multiplicity.OPTIONAL,
-                isComposite: Boolean = false,
-                reverseAttributeName: String = null) = {
-        require(name != null)
-        require(dT != null)
-        new AttributeDefinition(name, dT.getName, m, isComposite, reverseAttributeName)
-    }
-
-    def listTypes = ts.getTypeNames.sorted.toList.mkString("[", ",", "]")
-
-    def ts = TypeSystem.getInstance
-
-    def defineStructType(name: String, attrDef: AttributeDefinition*) = {
-        require(name != null)
-        ts.defineStructType(name, false, attrDef: _*)
-    }
-
-    def createInstance(typeName: String, jsonStr: String)(implicit formats: Formats) = {
-        val j = parse(jsonStr)
-        assert(j.isInstanceOf[JObject])
-        var j1 = j.asInstanceOf[JObject]
-        j1 = JObject(JField(Serialization.STRUCT_TYPE_FIELD_NAME, JString(typeName)) :: j1.obj)
-        new DynamicTypedStruct(Extraction.extract[StructInstance](j1))
-    }
-
-    def createInstance(typeName: String) = {
-        new DynamicTypedStruct(
-            ts.getDataType(classOf[StructType], typeName).asInstanceOf[IConstructableType[IStruct, ITypedStruct]].createInstance())
-    }
-
-    implicit def dynTypedStructToTypedStruct(s: DynamicTypedStruct) = s.ts
-
-    implicit def dynTypedStructToJson(s: DynamicTypedStruct)(implicit formats: Formats) = {
-        Extraction.decompose(s.ts)(formats)
-    }
-}

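Taken together, the package object above gave a compact define-then-instantiate flow: attrDef builds AttributeDefinitions against the shared TypeSystem, defineStructType registers the type, and createInstance hands back a DynamicTypedStruct. A minimal end-to-end sketch (struct name and attributes hypothetical):

    import org.apache.atlas.tools.dsl._

    // Register a struct type, then fill an instance through the Dynamic wrapper.
    defineStructType("Person",
        attrDef("name", STRING_TYPE, ATTR_REQUIRED),
        attrDef("age", INT_TYPE))

    val p = createInstance("Person")
    p.name = "Jane Doe"
    p.age = 42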
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/scala/org/apache/atlas/tools/simpleserver/Main.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/simpleserver/Main.scala b/tools/src/main/scala/org/apache/atlas/tools/simpleserver/Main.scala
deleted file mode 100755
index 7b98d77..0000000
--- a/tools/src/main/scala/org/apache/atlas/tools/simpleserver/Main.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.tools.simpleserver
-
-import akka.actor.{ActorSystem, Props}
-import akka.io.IO
-import com.typesafe.config.ConfigFactory
-import org.apache.atlas.repository.memory.MemRepository
-import org.apache.atlas.typesystem.types.TypeSystem
-import spray.can.Http
-
-/**
- * A Simple Spray based server to test the TypeSystem and MemRepository.
- *
- * @example {{{
- *              -- Using the [[ https://github.com/jakubroztocil/httpie Httpie tool]]
- *
- *              http GET localhost:9140/listTypeNames
- *              pbpaste | http PUT localhost:9140/defineTypes
- *              http GET localhost:9140/typeDetails typeNames:='["Department", "Person", "Manager"]'
- *
- *              pbpaste | http PUT localhost:9140/createInstance
- *              pbpaste | http GET localhost:9140/getInstance
- *          }}}
- *
- *          - On macOS, pbpaste emits whatever is on the clipboard; copy the contents of resources/sampleTypes.json first.
- *          - For createInstance, resources/sampleInstance.json is an example payload.
- *          - For getInstance, send an Id back; you can copy it from the createInstance output.
- *
- */
-object Main extends App {
-    val config = ConfigFactory.load()
-    val host = config.getString("http.host")
-    val port = config.getInt("http.port")
-
-    implicit val system = ActorSystem("atlasservice")
-
-    val typSys = TypeSystem.getInstance()
-    val memRepo = new MemRepository(typSys)
-
-    val api = system.actorOf(Props(new RestInterface(typSys, memRepo)), "httpInterface")
-    IO(Http) ! Http.Bind(listener = api, interface = host, port = port)
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/22624786/tools/src/main/scala/org/apache/atlas/tools/simpleserver/MetadataActor.scala
----------------------------------------------------------------------
diff --git a/tools/src/main/scala/org/apache/atlas/tools/simpleserver/MetadataActor.scala b/tools/src/main/scala/org/apache/atlas/tools/simpleserver/MetadataActor.scala
deleted file mode 100755
index 0429814..0000000
--- a/tools/src/main/scala/org/apache/atlas/tools/simpleserver/MetadataActor.scala
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.atlas.tools.simpleserver
-
-import akka.actor._
-import akka.util.Timeout
-import com.google.common.collect.ImmutableList
-import org.apache.atlas.repository.memory.MemRepository
-import org.apache.atlas.typesystem.json._
-import org.apache.atlas.typesystem.persistence.Id
-import org.apache.atlas.typesystem.types._
-import org.apache.atlas.typesystem.{ITypedReferenceableInstance, TypesDef}
-import org.json4s.{Formats, NoTypeHints}
-import spray.httpx.Json4sSupport
-
-import scala.concurrent.duration._
-
-
-class MetadataActor(val typeSystem: TypeSystem, val memRepository: MemRepository) extends Actor with ActorLogging {
-
-  import org.apache.atlas.tools.simpleserver.MetadataProtocol._
-
-  import scala.collection.JavaConversions._
-  import scala.language.postfixOps
-    implicit val timeout = Timeout(5 seconds)
-
-
-    def receive = {
-        case ListTypeNames() =>
-            sender ! TypeNames(typeSystem.getTypeNames.toList)
-
-        case GetTypeDetails(typeNames) =>
-            val typesDef = TypesSerialization.convertToTypesDef(typeSystem, (d: IDataType[_]) => typeNames.contains(d.getName))
-            sender ! TypeDetails(typesDef)
-
-        case DefineTypes(typesDef: TypesDef) =>
-            typesDef.enumTypes.foreach(typeSystem.defineEnumType(_))
-
-            typeSystem.defineTypes(ImmutableList.copyOf(typesDef.structTypes.toArray),
-                ImmutableList.copyOf(typesDef.traitTypes.toArray),
-                ImmutableList.copyOf(typesDef.classTypes.toArray))
-
-            var newTypes: List[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]] = Nil
-            typesDef.traitTypes.foreach { tDef =>
-                val nm = tDef.typeName
-                newTypes = newTypes :+
-                    typeSystem.getDataType(classOf[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]], nm)
-            }
-            typesDef.classTypes.foreach { tDef =>
-                val nm = tDef.typeName
-                newTypes = newTypes :+
-                    typeSystem.getDataType(classOf[HierarchicalType[_ <: HierarchicalType[_ <: AnyRef, _], _]], nm)
-            }
-
-            memRepository.defineTypes(newTypes)
-
-            sender ! TypesCreated
-
-        case CreateInstance(i) =>
-            val r = memRepository.create(i)
-            sender ! InstanceCreated(r.getId)
-
-        case GetInstance(id) =>
-            val r = memRepository.get(id)
-            sender ! InstanceDetails(r)
-    }
-
-}
-
-object MetadataProtocol {
-
-    case class ListTypeNames()
-
-    case class TypeNames(typeNames: List[String])
-
-    case class GetTypeDetails(typeNames: List[String])
-
-    case class TypeDetails(types: TypesDef)
-
-    case class DefineTypes(types: TypesDef)
-
-    case class TypesCreated()
-
-    case class CreateInstance(i: ITypedReferenceableInstance)
-
-    case class InstanceCreated(id: Id)
-
-    case class GetInstance(id: Id)
-
-    case class InstanceDetails(i: ITypedReferenceableInstance)
-
-}
-
-
-trait Json4sProtocol extends Json4sSupport {
-    val typeSystem: TypeSystem
-    val memRepository: MemRepository
-
-    implicit def json4sFormats: Formats =
-        org.json4s.native.Serialization.formats(NoTypeHints) + new MultiplicitySerializer +
-            new TypedStructSerializer +
-            new TypedReferenceableInstanceSerializer +
-            new BigDecimalSerializer + new BigIntegerSerializer + new IdSerializer
-}
-

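Every MetadataProtocol message above is answered with a typed reply, so callers talk to the actor through Akka's ask pattern and narrow the resulting Future with mapTo. A minimal sketch of that round trip (wiring mirrors Main above; error handling omitted):

    import akka.actor.{ActorSystem, Props}
    import akka.pattern.ask
    import akka.util.Timeout
    import org.apache.atlas.repository.memory.MemRepository
    import org.apache.atlas.tools.simpleserver.MetadataActor
    import org.apache.atlas.tools.simpleserver.MetadataProtocol._
    import org.apache.atlas.typesystem.types.TypeSystem
    import scala.concurrent.duration._

    implicit val timeout = Timeout(5.seconds)
    val system = ActorSystem("atlasservice")
    val typeSystem = TypeSystem.getInstance()
    val memRepository = new MemRepository(typeSystem)
    val actor = system.actorOf(Props(new MetadataActor(typeSystem, memRepository)))

    // ListTypeNames() is answered with TypeNames(...); mapTo recovers the type.
    val namesFuture = (actor ? ListTypeNames()).mapTo[TypeNames]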
