atlas-commits mailing list archives

From jma...@apache.org
Subject [08/51] [abbrv] incubator-atlas git commit: Refactor packages and scripts to Atlas (cherry picked from commit 414beba)
Date Sun, 14 Jun 2015 17:44:48 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/resources/RexsterGraphResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/RexsterGraphResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/RexsterGraphResource.java
new file mode 100755
index 0000000..7f8fd17
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/RexsterGraphResource.java
@@ -0,0 +1,408 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.resources;
+
+import com.thinkaurelius.titan.core.TitanGraph;
+import com.tinkerpop.blueprints.Direction;
+import com.tinkerpop.blueprints.Edge;
+import com.tinkerpop.blueprints.Element;
+import com.tinkerpop.blueprints.Graph;
+import com.tinkerpop.blueprints.Vertex;
+import com.tinkerpop.blueprints.VertexQuery;
+import com.tinkerpop.blueprints.util.io.graphson.GraphSONMode;
+import com.tinkerpop.blueprints.util.io.graphson.GraphSONUtility;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.repository.graph.GraphProvider;
+import org.apache.atlas.web.util.Servlets;
+import org.apache.commons.lang.StringUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Jersey Resource for graph metadata operations.
+ * Implements most of the GET operations of the Rexster API, without the indexes.
+ * https://github.com/tinkerpop/rexster/wiki/Basic-REST-API
+ *
+ * This is a subset of Rexster's REST API, designed to provide only read-only methods
+ * for accessing the backend graph.
+ */
+@Path("graph")
+@Singleton
+public class RexsterGraphResource {
+    public static final String OUT_E = "outE";
+    public static final String IN_E = "inE";
+    public static final String BOTH_E = "bothE";
+    public static final String OUT = "out";
+    public static final String IN = "in";
+    public static final String BOTH = "both";
+    public static final String OUT_COUNT = "outCount";
+    public static final String IN_COUNT = "inCount";
+    public static final String BOTH_COUNT = "bothCount";
+    public static final String OUT_IDS = "outIds";
+    public static final String IN_IDS = "inIds";
+    public static final String BOTH_IDS = "bothIds";
+    private static final Logger LOG = LoggerFactory.getLogger(RexsterGraphResource.class);
+
+    private TitanGraph graph;
+
+    @Inject
+    public RexsterGraphResource(GraphProvider<TitanGraph> graphProvider) {
+        this.graph = graphProvider.get();
+    }
+
+    private static void validateInputs(String errorMsg, String... inputs) {
+        for (String input : inputs) {
+            if (StringUtils.isEmpty(input)) {
+                throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
+                        .entity(errorMsg)
+                        .type("text/plain")
+                        .build());
+            }
+        }
+    }
+
+    protected Graph getGraph() {
+        return graph;
+    }
+
+    protected Set<String> getVertexIndexedKeys() {
+        return graph.getIndexedKeys(Vertex.class);
+    }
+
+    protected Set<String> getEdgeIndexedKeys() {
+        return graph.getIndexedKeys(Edge.class);
+    }
+
+    /**
+     * Get a single vertex with a unique id.
+     *
+     * GET http://host/metadata/lineage/vertices/id
+     * graph.getVertex(id);
+     */
+    @GET
+    @Path("/vertices/{id}")
+    @Produces({Servlets.JSON_MEDIA_TYPE})
+    public Response getVertex(@PathParam("id") final String vertexId) {
+        LOG.info("Get vertex for vertexId= {}", vertexId);
+        validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId);
+        try {
+            Vertex vertex = findVertex(vertexId);
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.RESULTS, GraphSONUtility.jsonFromElement(
+                    vertex, getVertexIndexedKeys(), GraphSONMode.NORMAL));
+            return Response.ok(response).build();
+        } catch (JSONException e) {
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    private Vertex findVertex(String vertexId) {
+        Vertex vertex = getGraph().getVertex(vertexId);
+        if (vertex == null) {
+            String message = "Vertex with [" + vertexId + "] cannot be found.";
+            LOG.info(message);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(message, Response.Status.NOT_FOUND));
+        }
+
+        return vertex;
+    }
+
+    /**
+     * Get properties for a single vertex with a unique id.
+     * This is NOT a rexster API.
+     * <p/>
+     * GET http://host/metadata/lineage/vertices/properties/id
+     */
+    @GET
+    @Path("/vertices/properties/{id}")
+    @Produces({Servlets.JSON_MEDIA_TYPE})
+    public Response getVertexProperties(@PathParam("id") final String vertexId,
+                                        @DefaultValue("false") @QueryParam("relationships")
+                                        final String relationships) {
+        LOG.info("Get vertex for vertexId= {}", vertexId);
+        validateInputs("Invalid argument: vertex id passed is null or empty.", vertexId);
+        try {
+            Vertex vertex = findVertex(vertexId);
+
+            Map<String, String> vertexProperties = getVertexProperties(vertex);
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.RESULTS, new JSONObject(vertexProperties));
+            response.put(MetadataServiceClient.COUNT, vertexProperties.size());
+            return Response.ok(response).build();
+        } catch (JSONException e) {
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    private Map<String, String> getVertexProperties(Vertex vertex) {
+        Map<String, String> vertexProperties = new HashMap<>();
+        for (String key : vertex.getPropertyKeys()) {
+            vertexProperties.put(key, vertex.<String>getProperty(key));
+        }
+
+        // todo: get the properties from relationships
+
+        return vertexProperties;
+    }
+
+    /**
+     * Get a list of vertices matching a property key and a value.
+     * <p/>
+     * GET http://host/metadata/lineage/vertices?key=<key>&value=<value>
+     * graph.getVertices(key, value);
+     */
+    @GET
+    @Path("/vertices")
+    @Produces({Servlets.JSON_MEDIA_TYPE})
+    public Response getVertices(@QueryParam("key") final String key,
+                                @QueryParam("value") final String value) {
+        LOG.info("Get vertices for property key= {}, value= {}", key, value);
+        validateInputs("Invalid argument: key or value passed is null or empty.", key, value);
+        try {
+            JSONObject response = buildJSONResponse(getGraph().getVertices(key, value));
+            return Response.ok(response).build();
+
+        } catch (JSONException e) {
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    /**
+     * Get a list of adjacent edges with a direction.
+     *
+     * GET http://host/metadata/lineage/vertices/id/direction
+     * graph.getVertex(id).get{Direction}Edges();
+     * direction: {(?!outE)(?!bothE)(?!inE)(?!out)(?!both)(?!in)(?!query).+}
+     */
+    @GET
+    @Path("vertices/{id}/{direction}")
+    @Produces({Servlets.JSON_MEDIA_TYPE})
+    public Response getVertexEdges(@PathParam("id") String vertexId,
+                                   @PathParam("direction") String direction) {
+        LOG.info("Get vertex edges for vertexId= {}, direction= {}", vertexId, direction);
+        // Validate vertex id. Direction is validated in VertexQueryArguments.
+        validateInputs("Invalid argument: vertex id or direction passed is null or empty.",
+                vertexId, direction);
+        try {
+            Vertex vertex = findVertex(vertexId);
+
+            return getVertexEdges(vertex, direction);
+
+        } catch (JSONException e) {
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    private Response getVertexEdges(Vertex vertex, String direction) throws JSONException {
+        // break out the segment into the return and the direction
+        VertexQueryArguments queryArguments = new VertexQueryArguments(direction);
+        // if this is a query and the _return is "count" then we don't bother to send back the
+        // result array
+        boolean countOnly = queryArguments.isCountOnly();
+        // what kind of data the calling client wants back (vertices, edges, count, vertex
+        // identifiers)
+        ReturnType returnType = queryArguments.getReturnType();
+        // the query direction (both, out, in)
+        Direction queryDirection = queryArguments.getQueryDirection();
+
+        VertexQuery query = vertex.query().direction(queryDirection);
+
+        JSONArray elementArray = new JSONArray();
+        long counter = 0;
+        if (returnType == ReturnType.VERTICES || returnType == ReturnType.VERTEX_IDS) {
+            Iterable<Vertex> vertexQueryResults = query.vertices();
+            for (Vertex v : vertexQueryResults) {
+                if (returnType.equals(ReturnType.VERTICES)) {
+                    elementArray.put(GraphSONUtility.jsonFromElement(
+                            v, getVertexIndexedKeys(), GraphSONMode.NORMAL));
+                } else {
+                    elementArray.put(v.getId());
+                }
+                counter++;
+            }
+        } else if (returnType == ReturnType.EDGES) {
+            Iterable<Edge> edgeQueryResults = query.edges();
+            for (Edge e : edgeQueryResults) {
+                elementArray.put(GraphSONUtility.jsonFromElement(
+                        e, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
+                counter++;
+            }
+        } else if (returnType == ReturnType.COUNT) {
+            counter = query.count();
+        }
+
+        JSONObject response = new JSONObject();
+        if (!countOnly) {
+            response.put(MetadataServiceClient.RESULTS, elementArray);
+        }
+        response.put(MetadataServiceClient.COUNT, counter);
+        return Response.ok(response).build();
+    }
+
+    /**
+     * Get a single edge with a unique id.
+     *
+     * GET http://host/metadata/lineage/edges/id
+     * graph.getEdge(id);
+     */
+    @GET
+    @Path("/edges/{id}")
+    @Produces({Servlets.JSON_MEDIA_TYPE})
+    public Response getEdge(@PathParam("id") final String edgeId) {
+        LOG.info("Get vertex for edgeId= {}", edgeId);
+        validateInputs("Invalid argument: edge id passed is null or empty.", edgeId);
+        try {
+            Edge edge = getGraph().getEdge(edgeId);
+            if (edge == null) {
+                String message = "Edge with [" + edgeId + "] cannot be found.";
+                LOG.info(message);
+                throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
+                        .entity(Servlets.escapeJsonString(message)).build());
+            }
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.RESULTS, GraphSONUtility.jsonFromElement(
+                    edge, getEdgeIndexedKeys(), GraphSONMode.NORMAL));
+            return Response.ok(response).build();
+        } catch (JSONException e) {
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    private <T extends Element> JSONObject buildJSONResponse(Iterable<T> elements)
+            throws JSONException {
+        JSONArray vertexArray = new JSONArray();
+        long counter = 0;
+        for (Element element : elements) {
+            counter++;
+            vertexArray.put(GraphSONUtility.jsonFromElement(
+                    element, getVertexIndexedKeys(), GraphSONMode.NORMAL));
+        }
+
+        JSONObject response = new JSONObject();
+        response.put(MetadataServiceClient.RESULTS, vertexArray);
+        response.put(MetadataServiceClient.COUNT, counter);
+
+        return response;
+    }
+
+    private enum ReturnType {VERTICES, EDGES, COUNT, VERTEX_IDS}
+
+    /**
+     * Helper class for query arguments.
+     */
+    public static final class VertexQueryArguments {
+
+        private final Direction queryDirection;
+        private final ReturnType returnType;
+        private final boolean countOnly;
+
+        public VertexQueryArguments(String directionSegment) {
+            if (OUT_E.equals(directionSegment)) {
+                returnType = ReturnType.EDGES;
+                queryDirection = Direction.OUT;
+                countOnly = false;
+            } else if (IN_E.equals(directionSegment)) {
+                returnType = ReturnType.EDGES;
+                queryDirection = Direction.IN;
+                countOnly = false;
+            } else if (BOTH_E.equals(directionSegment)) {
+                returnType = ReturnType.EDGES;
+                queryDirection = Direction.BOTH;
+                countOnly = false;
+            } else if (OUT.equals(directionSegment)) {
+                returnType = ReturnType.VERTICES;
+                queryDirection = Direction.OUT;
+                countOnly = false;
+            } else if (IN.equals(directionSegment)) {
+                returnType = ReturnType.VERTICES;
+                queryDirection = Direction.IN;
+                countOnly = false;
+            } else if (BOTH.equals(directionSegment)) {
+                returnType = ReturnType.VERTICES;
+                queryDirection = Direction.BOTH;
+                countOnly = false;
+            } else if (BOTH_COUNT.equals(directionSegment)) {
+                returnType = ReturnType.COUNT;
+                queryDirection = Direction.BOTH;
+                countOnly = true;
+            } else if (IN_COUNT.equals(directionSegment)) {
+                returnType = ReturnType.COUNT;
+                queryDirection = Direction.IN;
+                countOnly = true;
+            } else if (OUT_COUNT.equals(directionSegment)) {
+                returnType = ReturnType.COUNT;
+                queryDirection = Direction.OUT;
+                countOnly = true;
+            } else if (BOTH_IDS.equals(directionSegment)) {
+                returnType = ReturnType.VERTEX_IDS;
+                queryDirection = Direction.BOTH;
+                countOnly = false;
+            } else if (IN_IDS.equals(directionSegment)) {
+                returnType = ReturnType.VERTEX_IDS;
+                queryDirection = Direction.IN;
+                countOnly = false;
+            } else if (OUT_IDS.equals(directionSegment)) {
+                returnType = ReturnType.VERTEX_IDS;
+                queryDirection = Direction.OUT;
+                countOnly = false;
+            } else {
+                throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
+                        .entity(Servlets.escapeJsonString(directionSegment + " segment was invalid."))
+                        .build());
+            }
+        }
+
+        public Direction getQueryDirection() {
+            return queryDirection;
+        }
+
+        public ReturnType getReturnType() {
+            return returnType;
+        }
+
+        public boolean isCountOnly() {
+            return countOnly;
+        }
+    }
+}

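For illustration, a minimal read-only client sketch against the resource above. The base URL is an assumption (the prefix depends on how the webapp is mounted); only the graph/vertices/{id}/{direction} segments come from the resource, and the direction segment must be one of the tokens parsed by VertexQueryArguments (outE, inE, bothE, out, in, both, outCount, inCount, bothCount, outIds, inIds, bothIds).

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class GraphResourceClientSketch {
        public static void main(String[] args) throws Exception {
            // Assumed base URL; adjust to the actual webapp mount point.
            String base = "http://localhost:21000/api/metadata/graph";

            // "outCount" makes VertexQueryArguments set countOnly=true, so the
            // response carries only the "count" field, no "results" array.
            URL url = new URL(base + "/vertices/4/outCount");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                for (String line; (line = in.readLine()) != null; ) {
                    System.out.println(line);
                }
            }
        }
    }
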
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java b/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
new file mode 100755
index 0000000..ff65580
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/resources/TypesResource.java
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.resources;
+
+import com.sun.jersey.api.client.ClientResponse;
+import org.apache.atlas.MetadataException;
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.services.MetadataService;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.web.util.Servlets;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.inject.Singleton;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.Response;
+import java.util.List;
+
+/**
+ * This class provides the RESTful API for Types.
+ *
+ * A type is the description of any representable item,
+ * e.g. a Hive table.
+ *
+ * Any domain's meta model can be represented with these types.
+ */
+@Path("types")
+@Singleton
+public class TypesResource {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TypesResource.class);
+
+    private final MetadataService metadataService;
+
+    static final String TYPE_ALL = "all";
+
+    @Inject
+    public TypesResource(MetadataService metadataService) {
+        this.metadataService = metadataService;
+    }
+
+    /**
+     * Submits a type definition corresponding to a given type representing a meta model of a
+     * domain. Could represent things like Hive Database, Hive Table, etc.
+     */
+    @POST
+    @Consumes(Servlets.JSON_MEDIA_TYPE)
+    @Produces(Servlets.JSON_MEDIA_TYPE)
+    public Response submit(@Context HttpServletRequest request) {
+        try {
+            final String typeDefinition = Servlets.getRequestPayload(request);
+            LOG.debug("Creating type with definition {} ", typeDefinition);
+
+            JSONObject typesJson = metadataService.createType(typeDefinition);
+            final JSONArray typesJsonArray = typesJson.getJSONArray(MetadataServiceClient.TYPES);
+
+            JSONArray typesResponse = new JSONArray();
+            for (int i = 0; i < typesJsonArray.length(); i++) {
+                final String name = typesJsonArray.getString(i);
+                typesResponse.put(new JSONObject() {{
+                    put(MetadataServiceClient.NAME, name);
+                }});
+            }
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
+            response.put(MetadataServiceClient.TYPES, typesResponse);
+            return Response.status(ClientResponse.Status.CREATED).entity(response).build();
+        } catch (MetadataException | IllegalArgumentException e) {
+            LOG.error("Unable to persist types", e);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+        } catch (Throwable e) {
+            LOG.error("Unable to persist types", e);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    /**
+     * Fetches the complete definition for the given type name.
+     *
+     * @param typeName unique name of the type
+     */
+    @GET
+    @Path("{typeName}")
+    @Produces(Servlets.JSON_MEDIA_TYPE)
+    public Response getDefinition(@Context HttpServletRequest request,
+                                  @PathParam("typeName") String typeName) {
+        try {
+            final String typeDefinition = metadataService.getTypeDefinition(typeName);
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.TYPENAME, typeName);
+            response.put(MetadataServiceClient.DEFINITION, typeDefinition);
+            response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
+
+            return Response.ok(response).build();
+        } catch (MetadataException e) {
+            LOG.error("Unable to get type definition for type {}", typeName, e);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.NOT_FOUND));
+        } catch (JSONException | IllegalArgumentException e) {
+            LOG.error("Unable to get type definition for type {}", typeName, e);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.BAD_REQUEST));
+        } catch (Throwable e) {
+            LOG.error("Unable to get type definition for type {}", typeName, e);
+            throw new WebApplicationException(
+                Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+
+    /**
+     * Gets the list of type names registered in the type system,
+     * optionally filtered by type category.
+     *
+     * @param type the name of a value of the enum
+     *             org.apache.atlas.typesystem.types.DataTypes.TypeCategory;
+     *             typically one of all, TRAIT, CLASS, ENUM, STRUCT
+     * @return type names response payload as JSON
+     */
+    @GET
+    @Produces(Servlets.JSON_MEDIA_TYPE)
+    public Response getTypesByFilter(@Context HttpServletRequest request,
+                                     @DefaultValue(TYPE_ALL) @QueryParam("type") String type) {
+        try {
+            List<String> result;
+            if (TYPE_ALL.equals(type)) {
+                result = metadataService.getTypeNamesList();
+            } else {
+                DataTypes.TypeCategory typeCategory = DataTypes.TypeCategory.valueOf(type);
+                result = metadataService.getTypeNamesByCategory(typeCategory);
+            }
+
+            JSONObject response = new JSONObject();
+            response.put(MetadataServiceClient.RESULTS, new JSONArray(result));
+            response.put(MetadataServiceClient.COUNT, result.size());
+            response.put(MetadataServiceClient.REQUEST_ID, Servlets.getRequestId());
+
+            return Response.ok(response).build();
+        } catch (IllegalArgumentException | MetadataException ie) {
+            LOG.error("Unsupported typeName while retrieving type list {}", type);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse("Unsupported type " + type, Response.Status.BAD_REQUEST));
+        } catch (Throwable e) {
+            LOG.error("Unable to get types list", e);
+            throw new WebApplicationException(
+                    Servlets.getErrorResponse(e, Response.Status.INTERNAL_SERVER_ERROR));
+        }
+    }
+}

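A similar sketch for this resource, again with an assumed base URL; the "type" query parameter and the results/count/requestId response fields come from getTypesByFilter above.

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class TypesClientSketch {
        public static void main(String[] args) throws Exception {
            // Lists registered trait type names; valid filter values are "all"
            // or a DataTypes.TypeCategory name such as TRAIT, CLASS, ENUM, STRUCT.
            URL url = new URL("http://localhost:21000/api/metadata/types?type=TRAIT");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("GET");
            try (BufferedReader in = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
                for (String line; (line = in.readLine()) != null; ) {
                    System.out.println(line); // {"results":[...],"count":N,"requestId":"..."}
                }
            }
        }
    }
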
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
new file mode 100755
index 0000000..48285f0
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/service/EmbeddedServer.java
@@ -0,0 +1,87 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.service;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.bio.SocketConnector;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+import java.io.IOException;
+
+/**
+ * This class embeds a Jetty server and a connector.
+ */
+public class EmbeddedServer {
+    private static final int DEFAULT_BUFFER_SIZE = 16192;
+
+    protected final Server server = new Server();
+
+    public EmbeddedServer(int port, String path) throws IOException {
+        Connector connector = getConnector(port);
+        server.addConnector(connector);
+
+        WebAppContext application = new WebAppContext(path, "/");
+        server.setHandler(application);
+    }
+
+    public static EmbeddedServer newServer(int port, String path, boolean secure) throws IOException {
+        if (secure) {
+            return new SecureEmbeddedServer(port, path);
+        } else {
+            return new EmbeddedServer(port, path);
+        }
+    }
+
+    protected Connector getConnector(int port) throws IOException {
+        Connector connector = new SocketConnector();
+        connector.setPort(port);
+        connector.setHost("0.0.0.0");
+
+        // this is to enable large header sizes when Kerberos is enabled with AD
+        final Integer bufferSize = getBufferSize();
+        connector.setHeaderBufferSize(bufferSize);
+        connector.setRequestBufferSize(bufferSize);
+
+        return connector;
+    }
+
+    private Integer getBufferSize() {
+        try {
+            PropertiesConfiguration configuration = new PropertiesConfiguration(
+                    "application.properties");
+            return configuration.getInt("atlas.jetty.request.buffer.size", DEFAULT_BUFFER_SIZE);
+        } catch (ConfigurationException e) {
+            // do nothing
+        }
+
+        return DEFAULT_BUFFER_SIZE;
+    }
+
+    public void start() throws Exception {
+        server.start();
+        server.join();
+    }
+
+    public void stop() throws Exception {
+        server.stop();
+    }
+}

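A minimal bootstrap sketch for the server above; the port and the exploded-webapp path are assumptions.

    import org.apache.atlas.web.service.EmbeddedServer;

    public class ServerBootSketch {
        public static void main(String[] args) throws Exception {
            // newServer(...) returns a SecureEmbeddedServer when the last argument is true.
            EmbeddedServer server =
                    EmbeddedServer.newServer(21000, "webapp/target/atlas-webapp", false);
            server.start(); // blocks: start() also joins the Jetty server thread
        }
    }
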
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/service/SecureEmbeddedServer.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/service/SecureEmbeddedServer.java b/webapp/src/main/java/org/apache/atlas/web/service/SecureEmbeddedServer.java
new file mode 100755
index 0000000..19e7783
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/service/SecureEmbeddedServer.java
@@ -0,0 +1,116 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.service;
+
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.commons.configuration.PropertiesConfiguration;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.alias.CredentialProvider;
+import org.apache.hadoop.security.alias.CredentialProviderFactory;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.security.SslSocketConnector;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+import static org.apache.atlas.security.SecurityProperties.CERT_STORES_CREDENTIAL_PROVIDER_PATH;
+import static org.apache.atlas.security.SecurityProperties.CLIENT_AUTH_KEY;
+import static org.apache.atlas.security.SecurityProperties.DEFATULT_TRUSTORE_FILE_LOCATION;
+import static org.apache.atlas.security.SecurityProperties.DEFAULT_KEYSTORE_FILE_LOCATION;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_FILE_KEY;
+import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;
+
+/**
+ * This is a Jetty server that supports client authentication via certificates.
+ */
+public class SecureEmbeddedServer extends EmbeddedServer {
+
+    private static final Logger LOG = LoggerFactory.getLogger(SecureEmbeddedServer.class);
+
+    public SecureEmbeddedServer(int port, String path) throws IOException {
+        super(port, path);
+    }
+
+    protected Connector getConnector(int port) throws IOException {
+        PropertiesConfiguration config = getConfiguration();
+
+        SslSocketConnector connector = new SslSocketConnector();
+        connector.setPort(port);
+        connector.setHost("0.0.0.0");
+        connector.setKeystore(config.getString(KEYSTORE_FILE_KEY,
+                System.getProperty(KEYSTORE_FILE_KEY, DEFAULT_KEYSTORE_FILE_LOCATION)));
+        connector.setKeyPassword(getPassword(config, KEYSTORE_PASSWORD_KEY));
+        connector.setTruststore(config.getString(TRUSTSTORE_FILE_KEY,
+                System.getProperty(TRUSTSTORE_FILE_KEY, DEFATULT_TRUSTORE_FILE_LOCATION)));
+        connector.setTrustPassword(getPassword(config, TRUSTSTORE_PASSWORD_KEY));
+        connector.setPassword(getPassword(config, SERVER_CERT_PASSWORD_KEY));
+        connector.setWantClientAuth(config.getBoolean(CLIENT_AUTH_KEY, Boolean.getBoolean(CLIENT_AUTH_KEY)));
+        return connector;
+    }
+
+    /**
+     * Retrieves a password for the given alias from the configured credential provider;
+     * fails if no credential provider path is configured or the entry is missing.
+     * @param config application configuration
+     * @param key the key/alias for the password.
+     * @return  the password.
+     * @throws IOException
+     */
+    private String getPassword(PropertiesConfiguration config, String key) throws IOException {
+
+        String password;
+
+        String provider = config.getString(CERT_STORES_CREDENTIAL_PROVIDER_PATH);
+        if (provider != null) {
+            LOG.info("Attempting to retrieve password from configured credential provider path");
+            Configuration c = new Configuration();
+            c.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, provider);
+            CredentialProvider credentialProvider =
+                    CredentialProviderFactory.getProviders(c).get(0);
+            CredentialProvider.CredentialEntry entry = credentialProvider.getCredentialEntry(key);
+            if (entry == null) {
+                throw new IOException(String.format("No credential entry found for %s. " +
+                        "Please create an entry in the configured credential provider", key));
+            } else {
+                password = String.valueOf(entry.getCredential());
+            }
+
+        } else {
+            throw new IOException("No credential provider path configured for storage of certificate store passwords");
+        }
+
+        return password;
+    }
+
+    /**
+     * Returns the application configuration.
+     * @return the application configuration
+     */
+    protected PropertiesConfiguration getConfiguration() {
+        try {
+            return new PropertiesConfiguration("application.properties");
+        } catch (ConfigurationException e) {
+            throw new RuntimeException("Unable to load configuration: application.properties");
+        }
+    }
+}

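getPassword(...) above expects the keystore, truststore, and server certificate passwords to already exist in a Hadoop credential provider. A provisioning sketch using the Hadoop credential API, assuming the JCEKS path below matches the CERT_STORES_CREDENTIAL_PROVIDER_PATH value configured in application.properties:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProvider;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    import static org.apache.atlas.security.SecurityProperties.KEYSTORE_PASSWORD_KEY;
    import static org.apache.atlas.security.SecurityProperties.SERVER_CERT_PASSWORD_KEY;
    import static org.apache.atlas.security.SecurityProperties.TRUSTSTORE_PASSWORD_KEY;

    public class ProvisionCredentialsSketch {
        public static void main(String[] args) throws Exception {
            // Assumed provider location; must match the application configuration.
            Configuration conf = new Configuration();
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
                    "jceks://file/tmp/atlas-ssl.jceks");

            CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
            // One entry per alias read by SecureEmbeddedServer.getPassword(...);
            // the passwords here are placeholders.
            provider.createCredentialEntry(KEYSTORE_PASSWORD_KEY, "keystore-password".toCharArray());
            provider.createCredentialEntry(TRUSTSTORE_PASSWORD_KEY, "truststore-password".toCharArray());
            provider.createCredentialEntry(SERVER_CERT_PASSWORD_KEY, "cert-password".toCharArray());
            provider.flush(); // persist the entries to the keystore file
        }
    }
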
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/util/DateTimeHelper.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/util/DateTimeHelper.java b/webapp/src/main/java/org/apache/atlas/web/util/DateTimeHelper.java
new file mode 100755
index 0000000..7dced7e
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/util/DateTimeHelper.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.util;
+
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Support functions to parse and format dates.
+ */
+public final class DateTimeHelper {
+
+    public static final String ISO8601_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'";
+    private static final String DATE_PATTERN =
+            "(2\\d\\d\\d|19\\d\\d)-(0[1-9]|1[012])-(0[1-9]|1[0-9]|2[0-9]|3[01])T" +
+                    "([0-1][0-9]|2[0-3]):([0-5][0-9])Z";
+    private static final Pattern PATTERN = Pattern.compile(DATE_PATTERN);
+
+    private DateTimeHelper() {
+    }
+
+    public static String getTimeZoneId(TimeZone tz) {
+        return tz.getID();
+    }
+
+    public static DateFormat getDateFormat() {
+        DateFormat dateFormat = new SimpleDateFormat(ISO8601_FORMAT);
+        dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+        return dateFormat;
+    }
+
+    public static String formatDateUTC(Date date) {
+        return (date != null) ? getDateFormat().format(date) : null;
+    }
+
+    public static Date parseDateUTC(String dateStr) {
+        if (!validate(dateStr)) {
+            throw new IllegalArgumentException(dateStr + " is not a valid UTC string");
+        }
+        try {
+            return getDateFormat().parse(dateStr);
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static String formatDateUTCToISO8601(final String dateString,
+                                                final String dateStringFormat) {
+
+        try {
+            DateFormat dateFormat = new SimpleDateFormat(
+                    dateStringFormat.substring(0, dateString.length()));
+            dateFormat.setTimeZone(TimeZone.getTimeZone("UTC"));
+            return DateTimeHelper.formatDateUTC(dateFormat.parse(dateString));
+        } catch (ParseException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    /**
+     * Validate date format with regular expression.
+     *
+     * @param date date string to validate
+     * @return true if the date format is valid, false otherwise
+     */
+    public static boolean validate(final String date) {
+
+        Matcher matcher = PATTERN.matcher(date);
+
+        if (matcher.matches()) {
+
+            matcher.reset();
+
+            if (matcher.find()) {
+
+                int year = Integer.parseInt(matcher.group(1));
+                String month = matcher.group(2);
+                String day = matcher.group(3);
+
+                if (day.equals("31")
+                        && (month.equals("4") || month.equals("6")
+                        || month.equals("9") || month.equals("11")
+                        || month.equals("04") || month.equals("06") || month.equals("09"))) {
+                    return false; // only months 1,3,5,7,8,10,12 have 31 days
+                } else if (month.equals("2") || month.equals("02")) {
+                    // Gregorian leap year: divisible by 4, except centuries not divisible by 400
+                    if ((year % 4 == 0 && year % 100 != 0) || year % 400 == 0) {
+                        return !(day.equals("30") || day.equals("31"));
+                    } else {
+                        return !(day.equals("29") || day.equals("30") || day.equals("31"));
+                    }
+                } else {
+                    return true;
+                }
+            } else {
+                return false;
+            }
+        } else {
+            return false;
+        }
+    }
+}
\ No newline at end of file

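A usage sketch for the helper above; the printed values are illustrative.

    import java.util.Date;

    import org.apache.atlas.web.util.DateTimeHelper;

    public class DateTimeHelperSketch {
        public static void main(String[] args) {
            // Format the current instant as yyyy-MM-dd'T'HH:mm'Z' in UTC
            String formatted = DateTimeHelper.formatDateUTC(new Date());
            System.out.println(formatted); // e.g. 2015-06-14T17:44Z

            // validate(...) accepts only strings matching the minute-precision pattern
            System.out.println(DateTimeHelper.validate(formatted));            // true
            System.out.println(DateTimeHelper.validate("2015-02-30T10:00Z"));  // false: no Feb 30

            // parseDateUTC(...) throws IllegalArgumentException when validate(...) fails
            Date parsed = DateTimeHelper.parseDateUTC(formatted);
            System.out.println(parsed.getTime());
        }
    }
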
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java b/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java
new file mode 100755
index 0000000..996ead2
--- /dev/null
+++ b/webapp/src/main/java/org/apache/atlas/web/util/Servlets.java
@@ -0,0 +1,152 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.util;
+
+import org.apache.atlas.MetadataServiceClient;
+import org.apache.atlas.ParamChecker;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+/**
+ * Utility functions for dealing with servlets.
+ */
+public final class Servlets {
+
+    private static final Logger LOG = LoggerFactory.getLogger(Servlets.class);
+    private Servlets() {
+        /* utility class; prevent instantiation */
+    }
+
+    public static final String JSON_MEDIA_TYPE = MediaType.APPLICATION_JSON + "; charset=UTF-8";
+
+    /**
+     * Returns the user of the given request.
+     *
+     * @param httpRequest    an HTTP servlet request
+     * @return the user
+     */
+    public static String getUserFromRequest(HttpServletRequest httpRequest) {
+        String user = httpRequest.getRemoteUser();
+        if (!StringUtils.isEmpty(user)) {
+            return user;
+        }
+
+        user = httpRequest.getParameter("user.name"); // available in query-param
+        if (!StringUtils.isEmpty(user)) {
+            return user;
+        }
+
+        user = httpRequest.getHeader("Remote-User"); // backwards-compatibility
+        if (!StringUtils.isEmpty(user)) {
+            return user;
+        }
+
+        return null;
+    }
+
+    /**
+     * Returns the URI of the given request.
+     *
+     * @param httpRequest    an HTTP servlet request
+     * @return the URI, including the query string
+     */
+    public static String getRequestURI(HttpServletRequest httpRequest) {
+        final StringBuilder url = new StringBuilder(100).append(httpRequest.getRequestURI());
+        if (httpRequest.getQueryString() != null) {
+            url.append('?').append(httpRequest.getQueryString());
+        }
+
+        return url.toString();
+    }
+
+    /**
+     * Returns the full URL of the given request.
+     *
+     * @param httpRequest    an HTTP servlet request
+     * @return the full URL, including the query string
+     */
+    public static String getRequestURL(HttpServletRequest httpRequest) {
+        final StringBuilder url = new StringBuilder(100).append(httpRequest.getRequestURL());
+        if (httpRequest.getQueryString() != null) {
+            url.append('?').append(httpRequest.getQueryString());
+        }
+
+        return url.toString();
+    }
+
+    public static Response getErrorResponse(Throwable e, Response.Status status) {
+        Response response = getErrorResponse(e.getMessage(), status);
+        JSONObject responseJson = (JSONObject) response.getEntity();
+        try {
+            responseJson.put(MetadataServiceClient.STACKTRACE, printStackTrace(e));
+        } catch (JSONException e1) {
+            LOG.warn("Could not construct error Json rensponse", e1);
+        }
+        return response;
+    }
+
+    private static String printStackTrace(Throwable t) {
+        StringWriter sw = new StringWriter();
+        t.printStackTrace(new PrintWriter(sw));
+        return sw.toString();
+    }
+
+    public static Response getErrorResponse(String message, Response.Status status) {
+        JSONObject errorJson = new JSONObject();
+        Object errorEntity = Servlets.escapeJsonString(message);
+        try {
+            errorJson.put(MetadataServiceClient.ERROR, errorEntity);
+            errorEntity = errorJson;
+        } catch (JSONException jsonE) {
+            LOG.warn("Could not construct error Json rensponse", jsonE);
+        }
+        return Response
+                .status(status)
+                .entity(errorEntity)
+                .type(JSON_MEDIA_TYPE)
+                .build();
+    }
+
+    public static String getRequestPayload(HttpServletRequest request) throws IOException {
+        StringWriter writer = new StringWriter();
+        IOUtils.copy(request.getInputStream(), writer);
+        return writer.toString();
+    }
+
+    public static String getRequestId() {
+        return Thread.currentThread().getName();
+    }
+
+    public static String escapeJsonString(String inputStr) {
+        ParamChecker.notNull(inputStr, "Input String cannot be null");
+        return StringEscapeUtils.escapeJson(inputStr);
+    }
+}

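A sketch of the error-handling pattern the resources above rely on, assuming a JAX-RS implementation such as Jersey is on the classpath so the Response builders resolve:

    import javax.ws.rs.core.Response;

    import org.apache.atlas.web.util.Servlets;

    public class ErrorResponseSketch {
        public static void main(String[] args) {
            // getErrorResponse(Throwable, Status) builds a JSON entity carrying the
            // message plus the stack trace under MetadataServiceClient.STACKTRACE.
            Response r = Servlets.getErrorResponse(
                    new IllegalStateException("boom"), Response.Status.INTERNAL_SERVER_ERROR);
            System.out.println(r.getStatus()); // 500
            System.out.println(r.getEntity()); // JSONObject entity
        }
    }
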
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/hadoop/metadata/Main.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/Main.java b/webapp/src/main/java/org/apache/hadoop/metadata/Main.java
deleted file mode 100755
index e778d45..0000000
--- a/webapp/src/main/java/org/apache/hadoop/metadata/Main.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.metadata.web.service.EmbeddedServer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Iterator;
-
-/**
- * Driver for running Metadata as a standalone server with embedded jetty server.
- */
-public final class Main {
-    private static final Logger LOG = LoggerFactory.getLogger(Main.class);
-    private static final String APP_PATH = "app";
-    private static final String APP_PORT = "port";
-    private static final String METADATA_HOME = "metadata.home";
-    private static final String METADATA_LOG_DIR = "metadata.log.dir";
-
-    /**
-     * Prevent users from constructing this.
-     */
-    private Main() {
-    }
-
-    private static CommandLine parseArgs(String[] args) throws ParseException {
-        Options options = new Options();
-        Option opt;
-
-        opt = new Option(APP_PATH, true, "Application Path");
-        opt.setRequired(false);
-        options.addOption(opt);
-
-        opt = new Option(APP_PORT, true, "Application Port");
-        opt.setRequired(false);
-        options.addOption(opt);
-
-        return new GnuParser().parse(options, args);
-    }
-
-    public static void main(String[] args) throws Exception {
-        CommandLine cmd = parseArgs(args);
-        PropertiesConfiguration buildConfiguration =
-                new PropertiesConfiguration("metadata-buildinfo.properties");
-        String appPath = "webapp/target/metadata-webapp-" + getProjectVersion(buildConfiguration);
-
-        if (cmd.hasOption(APP_PATH)) {
-            appPath = cmd.getOptionValue(APP_PATH);
-        }
-
-        setApplicationHome();
-        PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties();
-        final String enableTLSFlag = configuration.getString("metadata.enableTLS");
-        final int appPort = getApplicationPort(cmd, enableTLSFlag);
-        final boolean enableTLS = isTLSEnabled(enableTLSFlag, appPort);
-        configuration.setProperty("metadata.enableTLS", String.valueOf(enableTLS));
-
-        showStartupInfo(buildConfiguration, enableTLS, appPort);
-        EmbeddedServer server = EmbeddedServer.newServer(appPort, appPath, enableTLS);
-        server.start();
-    }
-
-    private static void setApplicationHome() {
-        if (System.getProperty(METADATA_HOME) == null) {
-            System.setProperty(METADATA_HOME, "target");
-        }
-        if (System.getProperty(METADATA_LOG_DIR) == null) {
-            System.setProperty(METADATA_LOG_DIR, "target/logs");
-        }
-    }
-
-    public static String getProjectVersion(PropertiesConfiguration buildConfiguration) {
-        return buildConfiguration.getString("project.version");
-    }
-
-    private static int getApplicationPort(CommandLine cmd, String enableTLSFlag) {
-        final int appPort;
-        if (cmd.hasOption(APP_PORT)) {
-            appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT));
-        } else {
-            // default : metadata.enableTLS is true
-            appPort = StringUtils.isEmpty(enableTLSFlag)
-                    || enableTLSFlag.equals("true") ? 21443 : 21000;
-        }
-
-        return appPort;
-    }
-
-    private static boolean isTLSEnabled(String enableTLSFlag, int appPort) {
-        return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag)
-                ? System
-                .getProperty("metadata.enableTLS", (appPort % 1000) == 443 ? "true" : "false")
-                : enableTLSFlag);
-    }
-
-    private static void showStartupInfo(PropertiesConfiguration buildConfiguration,
-                                        boolean enableTLS, int appPort) {
-        StringBuilder buffer = new StringBuilder();
-        buffer.append("\n############################################");
-        buffer.append("############################################");
-        buffer.append("\n                               DGI Server (STARTUP)");
-        buffer.append("\n");
-        try {
-            final Iterator<String> keys = buildConfiguration.getKeys();
-            while (keys.hasNext()) {
-                String key = keys.next();
-                buffer.append('\n').append('\t').append(key).
-                        append(":\t").append(buildConfiguration.getProperty(key));
-            }
-        } catch (Throwable e) {
-            buffer.append("*** Unable to get build info ***");
-        }
-        buffer.append("\n############################################");
-        buffer.append("############################################");
-        LOG.info(buffer.toString());
-        LOG.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
-        LOG.info("Server starting with TLS ? {} on port {}", enableTLS, appPort);
-        LOG.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<");
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/hadoop/metadata/examples/QuickStart.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/examples/QuickStart.java b/webapp/src/main/java/org/apache/hadoop/metadata/examples/QuickStart.java
deleted file mode 100755
index bf1ef39..0000000
--- a/webapp/src/main/java/org/apache/hadoop/metadata/examples/QuickStart.java
+++ /dev/null
@@ -1,478 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.examples;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.ImmutableList;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.typesystem.Referenceable;
-import org.apache.hadoop.metadata.typesystem.TypesDef;
-import org.apache.hadoop.metadata.typesystem.json.InstanceSerialization;
-import org.apache.hadoop.metadata.typesystem.json.TypesSerialization;
-import org.apache.hadoop.metadata.typesystem.persistence.Id;
-import org.apache.hadoop.metadata.typesystem.types.AttributeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.ClassType;
-import org.apache.hadoop.metadata.typesystem.types.DataTypes;
-import org.apache.hadoop.metadata.typesystem.types.EnumTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.HierarchicalTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.IDataType;
-import org.apache.hadoop.metadata.typesystem.types.Multiplicity;
-import org.apache.hadoop.metadata.typesystem.types.StructTypeDefinition;
-import org.apache.hadoop.metadata.typesystem.types.TraitType;
-import org.apache.hadoop.metadata.typesystem.types.TypeUtils;
-import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil;
-import org.codehaus.jettison.json.JSONArray;
-import org.codehaus.jettison.json.JSONObject;
-
-import java.util.List;
-
-/**
- * A driver that sets up sample types and data for testing purposes.
- * Please take a look at QueryDSL in docs for the Meta Model.
- * todo - move this to examples module.
- */
-public class QuickStart {
-
-    public static void main(String[] args) throws Exception {
-        String baseUrl = getServerUrl(args);
-        QuickStart quickStart = new QuickStart(baseUrl);
-
-        // Shows how to create types in DGI for your meta model
-        quickStart.createTypes();
-
-        // Shows how to create entities (instances) for the added types in DGI
-        quickStart.createEntities();
-
-        // Shows some search queries using DSL based on types
-        quickStart.search();
-    }
-
-    static String getServerUrl(String[] args) {
-        String baseUrl = "http://localhost:21000";
-        if (args.length > 0) {
-            baseUrl = args[0];
-        }
-
-        return baseUrl;
-    }
-
-    private static final String DATABASE_TYPE = "DB";
-    private static final String COLUMN_TYPE = "Column";
-    private static final String TABLE_TYPE = "Table";
-    private static final String VIEW_TYPE = "View";
-    private static final String LOAD_PROCESS_TYPE = "LoadProcess";
-    private static final String STORAGE_DESC_TYPE = "StorageDesc";
-
-    private static final String[] TYPES = {
-            DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE,
-            "JdbcAccess", "ETL", "Metric", "PII", "Fact", "Dimension"
-    };
-
-    private final MetadataServiceClient metadataServiceClient;
-
-    QuickStart(String baseUrl) {
-        metadataServiceClient = new MetadataServiceClient(baseUrl);
-    }
-
-    void createTypes() throws Exception {
-        TypesDef typesDef = createTypeDefinitions();
-
-        String typesAsJSON = TypesSerialization.toJson(typesDef);
-        System.out.println("typesAsJSON = " + typesAsJSON);
-        metadataServiceClient.createType(typesAsJSON);
-
-        // verify types created
-        verifyTypesCreated();
-    }
-
-    TypesDef createTypeDefinitions() throws Exception {
-        HierarchicalTypeDefinition<ClassType> dbClsDef
-                = TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
-                attrDef("name", DataTypes.STRING_TYPE),
-                attrDef("description", DataTypes.STRING_TYPE),
-                attrDef("locationUri", DataTypes.STRING_TYPE),
-                attrDef("owner", DataTypes.STRING_TYPE),
-                attrDef("createTime", DataTypes.INT_TYPE)
-        );
-
-        HierarchicalTypeDefinition<ClassType> storageDescClsDef =
-                TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null,
-                        attrDef("location", DataTypes.STRING_TYPE),
-                        attrDef("inputFormat", DataTypes.STRING_TYPE),
-                        attrDef("outputFormat", DataTypes.STRING_TYPE),
-                        attrDef("compressed", DataTypes.STRING_TYPE,
-                                Multiplicity.REQUIRED, false, null)
-                );
-
-        HierarchicalTypeDefinition<ClassType> columnClsDef =
-                TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
-                        attrDef("name", DataTypes.STRING_TYPE),
-                        attrDef("dataType", DataTypes.STRING_TYPE),
-                        attrDef("comment", DataTypes.STRING_TYPE)
-                );
-
-        HierarchicalTypeDefinition<ClassType> tblClsDef =
-                 TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
-                        new AttributeDefinition("db", DATABASE_TYPE,
-                                Multiplicity.REQUIRED, false, null),
-                        new AttributeDefinition("sd", STORAGE_DESC_TYPE,
-                                Multiplicity.REQUIRED, true, null),
-                        attrDef("owner", DataTypes.STRING_TYPE),
-                        attrDef("createTime", DataTypes.INT_TYPE),
-                        attrDef("lastAccessTime", DataTypes.INT_TYPE),
-                        attrDef("retention", DataTypes.INT_TYPE),
-                        attrDef("viewOriginalText", DataTypes.STRING_TYPE),
-                        attrDef("viewExpandedText", DataTypes.STRING_TYPE),
-                        attrDef("tableType", DataTypes.STRING_TYPE),
-                        attrDef("temporary", DataTypes.BOOLEAN_TYPE),
-                        new AttributeDefinition("columns",
-                                DataTypes.arrayTypeName(COLUMN_TYPE),
-                                Multiplicity.COLLECTION, true, null)
-                );
-
-        HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
-                 TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
-                        attrDef("userName", DataTypes.STRING_TYPE),
-                        attrDef("startTime", DataTypes.INT_TYPE),
-                        attrDef("endTime", DataTypes.INT_TYPE),
-                        attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
-                        attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
-                );
-
-        HierarchicalTypeDefinition<ClassType> viewClsDef =
-                TypesUtil.createClassTypeDef(VIEW_TYPE, null,
-                        attrDef("name", DataTypes.STRING_TYPE),
-                        new AttributeDefinition("db", DATABASE_TYPE,
-                                Multiplicity.REQUIRED, false, null),
-                        new AttributeDefinition("inputTables",
-                                DataTypes.arrayTypeName(TABLE_TYPE),
-                                Multiplicity.COLLECTION, false, null)
-                );
-
-        HierarchicalTypeDefinition<TraitType> dimTraitDef =
-                TypesUtil.createTraitTypeDef("Dimension", null);
-
-        HierarchicalTypeDefinition<TraitType> factTraitDef =
-                TypesUtil.createTraitTypeDef("Fact", null);
-
-        HierarchicalTypeDefinition<TraitType> piiTraitDef =
-                TypesUtil.createTraitTypeDef("PII", null);
-
-        HierarchicalTypeDefinition<TraitType> metricTraitDef =
-                TypesUtil.createTraitTypeDef("Metric", null);
-
-        HierarchicalTypeDefinition<TraitType> etlTraitDef =
-                TypesUtil.createTraitTypeDef("ETL", null);
-
-        HierarchicalTypeDefinition<TraitType> jdbcTraitDef =
-                TypesUtil.createTraitTypeDef("JdbcAccess", null);
-
-        return TypeUtils.getTypesDef(
-                ImmutableList.<EnumTypeDefinition>of(),
-                ImmutableList.<StructTypeDefinition>of(),
-                ImmutableList.of(dimTraitDef, factTraitDef,
-                        piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
-                ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef,
-                        tblClsDef, loadProcessClsDef, viewClsDef)
-        );
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT) {
-        return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
-        return attrDef(name, dT, m, false, null);
-    }
-
-    AttributeDefinition attrDef(String name, IDataType dT,
-                                Multiplicity m, boolean isComposite, String reverseAttributeName) {
-        Preconditions.checkNotNull(name);
-        Preconditions.checkNotNull(dT);
-        return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
-    }
-
-    void createEntities() throws Exception {
-        Id salesDB = database(
-                "Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
-
-
-        Referenceable sd = rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales",
-                "TextInputFormat", "TextOutputFormat", true);
-
-        List<Referenceable> salesFactColumns = ImmutableList.of(
-            rawColumn("time_id", "int", "time id"),
-            rawColumn("product_id", "int", "product id"),
-            rawColumn("customer_id", "int", "customer id", "PII"),
-            rawColumn("sales", "double", "product id", "Metric")
-        );
-
-        Id salesFact = table("sales_fact", "sales fact table",
-                salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
-
-        List<Referenceable> productDimColumns = ImmutableList.of(
-            rawColumn("product_id", "int", "product id"),
-            rawColumn("product_name", "string", "product name"),
-            rawColumn("brand_name", "int", "brand name")
-        );
-
-        Id productDim = table("product_dim", "product dimension table",
-                salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension");
-
-        List<Referenceable> timeDimColumns = ImmutableList.of(
-            rawColumn("time_id", "int", "time id"),
-            rawColumn("dayOfYear", "int", "day Of Year"),
-            rawColumn("weekDay", "int", "week Day")
-        );
-
-        Id timeDim = table("time_dim", "time dimension table",
-                salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension");
-
-
-        List<Referenceable> customerDimColumns = ImmutableList.of(
-            rawColumn("customer_id", "int", "customer id", "PII"),
-            rawColumn("name", "string", "customer name", "PII"),
-            rawColumn("address", "string", "customer address", "PII")
-        );
-
-        Id customerDim = table("customer_dim", "customer dimension table",
-                salesDB, sd, "fetl", "External", customerDimColumns, "Dimension");
-
-
-        Id reportingDB = database("Reporting", "reporting database", "Jane BI",
-                "hdfs://host:8000/apps/warehouse/reporting");
-
-        Id salesFactDaily = table("sales_fact_daily_mv",
-                "sales fact daily materialized view", reportingDB, sd,
-                "Joe BI", "Managed", salesFactColumns, "Metric");
-
-        loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
-                ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
-                "create table as select ", "plan", "id", "graph",
-                "ETL");
-
-        view("product_dim_view", reportingDB,
-                ImmutableList.of(productDim), "Dimension", "JdbcAccess");
-
-        view("customer_dim_view", reportingDB,
-                ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
-
-        Id salesFactMonthly = table("sales_fact_monthly_mv",
-                "sales fact monthly materialized view",
-                reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
-
-        loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
-                ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
-                "create table as select ", "plan", "id", "graph",
-                "ETL");
-    }
-
-    private Id createInstance(Referenceable referenceable) throws Exception {
-        String typeName = referenceable.getTypeName();
-
-        String entityJSON = InstanceSerialization.toJson(referenceable, true);
-        System.out.println("Submitting new entity= " + entityJSON);
-        JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
-        String guid = jsonObject.getString(MetadataServiceClient.GUID);
-        System.out.println("created instance for type " + typeName + ", guid: " + guid);
-
-        // return the Id for created instance with guid
-        return new Id(guid, referenceable.getId().getVersion(), referenceable.getTypeName());
-    }
-
-    Id database(String name, String description,
-                           String owner, String locationUri,
-                           String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("owner", owner);
-        referenceable.set("locationUri", locationUri);
-        referenceable.set("createTime", System.currentTimeMillis());
-
-        return createInstance(referenceable);
-    }
-
-    Referenceable rawStorageDescriptor(String location, String inputFormat,
-                                       String outputFormat,
-                                       boolean compressed) throws Exception {
-        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
-        referenceable.set("location", location);
-        referenceable.set("inputFormat", inputFormat);
-        referenceable.set("outputFormat", outputFormat);
-        referenceable.set("compressed", compressed);
-
-        return referenceable;
-    }
-
-    Referenceable rawColumn(String name, String dataType, String comment,
-                            String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("dataType", dataType);
-        referenceable.set("comment", comment);
-
-        return referenceable;
-    }
-
-    Id table(String name, String description,
-             Id dbId, Referenceable sd,
-             String owner, String tableType,
-             List<Referenceable> columns,
-             String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("owner", owner);
-        referenceable.set("tableType", tableType);
-        referenceable.set("createTime", System.currentTimeMillis());
-        referenceable.set("lastAccessTime", System.currentTimeMillis());
-        referenceable.set("retention", System.currentTimeMillis());
-        referenceable.set("db", dbId);
-        referenceable.set("sd", sd);
-        referenceable.set("columns", columns);
-
-        return createInstance(referenceable);
-    }
-
-    Id loadProcess(String name, String description, String user,
-                   List<Id> inputTables,
-                   List<Id> outputTables,
-                   String queryText, String queryPlan,
-                   String queryId, String queryGraph,
-                   String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
-        // super type attributes
-        referenceable.set("name", name);
-        referenceable.set("description", description);
-        referenceable.set("inputs", inputTables);
-        referenceable.set("outputs", outputTables);
-
-        referenceable.set("user", user);
-        referenceable.set("startTime", System.currentTimeMillis());
-        referenceable.set("endTime", System.currentTimeMillis() + 10000);
-
-        referenceable.set("queryText", queryText);
-        referenceable.set("queryPlan", queryPlan);
-        referenceable.set("queryId", queryId);
-        referenceable.set("queryGraph", queryGraph);
-
-        return createInstance(referenceable);
-    }
-
-    Id view(String name, Id dbId,
-            List<Id> inputTables,
-            String... traitNames) throws Exception {
-        Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
-        referenceable.set("name", name);
-        referenceable.set("db", dbId);
-
-        referenceable.set("inputTables", inputTables);
-
-        return createInstance(referenceable);
-    }
-
-    private void verifyTypesCreated() throws Exception {
-        List<String> types = metadataServiceClient.listTypes();
-        for (String type : TYPES) {
-            assert types.contains(type);
-        }
-    }
-
-    private String[] getDSLQueries() {
-        return new String[]{
-            "from DB",
-            "DB",
-            "DB where name=\"Reporting\"",
-            "DB where DB.name=\"Reporting\"",
-            "DB name = \"Reporting\"",
-            "DB DB.name = \"Reporting\"",
-            "DB where name=\"Reporting\" select name, owner",
-            "DB where DB.name=\"Reporting\" select name, owner",
-            "DB has name",
-            "DB where DB has name",
-            "DB, Table",
-            "DB is JdbcAccess",
-            /*
-            "DB, hive_process has name",
-            "DB as db1, Table where db1.name = \"Reporting\"",
-            "DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
-            */
-            "from Table",
-            "Table",
-            "Table is Dimension",
-            "Column where Column isa PII",
-            "View is Dimension",
-            /*"Column where Column isa PII select Column.name",*/
-            "Column select Column.name",
-            "Column select name",
-            "Column where Column.name=\"customer_id\"",
-            "from Table select Table.name",
-            "DB where (name = \"Reporting\")",
-            "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1",
-            "DB where DB is JdbcAccess",
-            "DB where DB has name",
-            "DB Table",
-            "DB where DB has name",
-            "DB as db1 Table where (db1.name = \"Reporting\")",
-            "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ",
-            /*
-            todo: does not work
-            "DB where (name = \"Reporting\") and ((createTime + 1) > 0)",
-            "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
-            "DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
-            "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
-            "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
-            */
-            // trait searches
-            "Dimension",
-            /*"Fact", - todo: does not work*/
-            "JdbcAccess",
-            "ETL",
-            "Metric",
-            "PII",
-            /*
-            // Lineage - todo - fix this, it's not working
-            "Table hive_process outputTables",
-            "Table loop (hive_process outputTables)",
-            "Table as _loop0 loop (hive_process outputTables) withPath",
-            "Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as destTable withPath",
-            */
-            "Table where name=\"sales_fact\", columns",
-            "Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment",
-            "from DataSet",
-            "from Process",
-        };
-    }
-
-    private void search() throws Exception {
-        for (String dslQuery : getDSLQueries()) {
-            JSONObject response = metadataServiceClient.searchEntity(dslQuery);
-            JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
-            if (!results.isNull("rows")) {
-                JSONArray rows = results.getJSONArray("rows");
-                System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
-            } else {
-                System.out.println("query [" + dslQuery + "] failed, results:" + results.toString());
-            }
-        }
-    }
-}
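
For readers tracking the rename: this driver is presumably re-created under the
new org.apache.atlas package elsewhere in this patch. A minimal sketch of the
same client flow against the renamed MetadataServiceClient (the endpoint is the
quick-start default; the class name "ClientSketch" is hypothetical):

    import org.apache.atlas.MetadataServiceClient;
    import org.codehaus.jettison.json.JSONObject;

    public class ClientSketch {
        public static void main(String[] args) throws Exception {
            // default quick-start endpoint; the real driver takes it from args[0]
            MetadataServiceClient client = new MetadataServiceClient("http://localhost:21000");

            // listTypes() is the same call verifyTypesCreated() relies on above
            for (String typeName : client.listTypes()) {
                System.out.println("registered type: " + typeName);
            }

            // mirror search(): run one DSL query and report the row count
            JSONObject response = client.searchEntity("from DB");
            JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
            if (!results.isNull("rows")) {
                System.out.println("rows: " + results.getJSONArray("rows").length());
            }
        }
    }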

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/hadoop/metadata/util/CredentialProviderUtility.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/util/CredentialProviderUtility.java b/webapp/src/main/java/org/apache/hadoop/metadata/util/CredentialProviderUtility.java
deleted file mode 100755
index a91db75..0000000
--- a/webapp/src/main/java/org/apache/hadoop/metadata/util/CredentialProviderUtility.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.metadata.util;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.metadata.web.service.SecureEmbeddedServer;
-import org.apache.hadoop.security.alias.CredentialProvider;
-import org.apache.hadoop.security.alias.CredentialProviderFactory;
-import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
-
-import java.io.*;
-import java.util.Arrays;
-
-import static org.apache.hadoop.metadata.security.SecurityProperties.*;
-
-/**
- * A utility class for generating a credential provider containing the entries required to support the SSL
- * implementation of the DGC server.
- */
-public class CredentialProviderUtility {
-    private static final String[] KEYS = new String[] {KEYSTORE_PASSWORD_KEY,
-            TRUSTSTORE_PASSWORD_KEY, SERVER_CERT_PASSWORD_KEY};
-
-    public static abstract class TextDevice {
-        public abstract void printf(String fmt, Object... params);
-
-        public abstract String readLine(String fmt, Object ... args);
-
-        public abstract char[] readPassword(String fmt, Object ... args);
-
-    }
-
-    private static TextDevice DEFAULT_TEXT_DEVICE = new TextDevice() {
-        Console console = System.console();
-
-        @Override
-        public void printf(String fmt, Object... params) {
-            console.printf(fmt, params);
-        }
-
-        @Override
-        public String readLine(String fmt, Object ... args) {
-            return console.readLine(fmt, args);
-        }
-
-        @Override
-        public char[] readPassword(String fmt, Object ... args) {
-            return console.readPassword(fmt, args);
-        }
-    };
-
-    public static TextDevice textDevice = DEFAULT_TEXT_DEVICE;
-
-    public static void main(String[] args) throws IOException {
-        // prompt for the provider name
-        CredentialProvider provider = getCredentialProvider(textDevice);
-
-        char[] cred;
-        for (String key : KEYS) {
-            cred = getPassword(textDevice, key);
-            // create a credential entry and store it
-            boolean overwrite = true;
-            if (provider.getCredentialEntry(key) != null) {
-                String choice = textDevice.readLine("Entry for %s already exists.  Overwrite? (y/n) [y]:", key);
-                overwrite = StringUtils.isEmpty(choice) || choice.equalsIgnoreCase("y");
-                if (overwrite) {
-                    provider.deleteCredentialEntry(key);
-                    provider.flush();
-                    provider.createCredentialEntry(key, cred);
-                    provider.flush();
-                    textDevice.printf("Entry for %s was overwritten with the new value.\n", key);
-                } else {
-                    textDevice.printf("Entry for %s was not overwritten.\n", key);
-                }
-            } else {
-                provider.createCredentialEntry(key, cred);
-                provider.flush();
-            }
-        }
-    }
-
-    /**
-     * Retrieves a password from the command line.
-     * @param textDevice  the system console.
-     * @param key   the password key/alias.
-     * @return  the password.
-     */
-    private static char[] getPassword(TextDevice textDevice, String key) {
-        boolean noMatch;
-        char[] cred = new char[0];
-        char[] passwd1;
-        char[] passwd2;
-        do {
-            passwd1 = textDevice.readPassword("Please enter the password value for %s:", key);
-            passwd2 = textDevice.readPassword("Please enter the password value for %s again:", key);
-            noMatch = !Arrays.equals(passwd1, passwd2);
-            if (noMatch) {
-                if (passwd1 != null) Arrays.fill(passwd1, ' ');
-                textDevice.printf("Password entries don't match. Please try again.\n");
-            } else {
-                if (passwd1.length == 0) {
-                    textDevice.printf("An empty password is not valid.  Please try again.\n");
-                    noMatch = true;
-                } else {
-                    cred = passwd1;
-                }
-            }
-            if (passwd2 != null) Arrays.fill(passwd2, ' ');
-        } while (noMatch);
-        return cred;
-    }
-
-    /**
-     * Returns a credential provider for the entered JKS path.
-     * @param textDevice the system console.
-     * @return  the Credential provider
-     * @throws IOException
-     */
-    private static CredentialProvider getCredentialProvider(TextDevice textDevice) throws IOException {
-        String providerPath = textDevice.readLine("Please enter the full path to the credential provider:");
-        File file = new File(providerPath);
-        if (file.exists()) {
-            textDevice.printf("%s already exists.  You will need to specify whether existing entries should be overwritten " +
-                    "(default is 'yes')\n", providerPath);
-        }
-        String providerURI = JavaKeyStoreProvider.SCHEME_NAME + "://file" + providerPath;
-        Configuration conf = new Configuration(false);
-        conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerURI);
-        return CredentialProviderFactory.getProviders(conf).get(0);
-    }
-}
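
The utility stores each entry in a Hadoop JavaKeyStoreProvider reached via a
jceks://file<path> URI, as built in getCredentialProvider() above. A sketch of
reading an entry back through Hadoop's Configuration.getPassword, which
consults the provider chain before falling back to the configuration itself
(the path and alias below are illustrative, not the actual Atlas key names):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.alias.CredentialProviderFactory;

    public class CredentialReadBack {
        public static void main(String[] args) throws Exception {
            // same URI shape the utility builds: SCHEME_NAME + "://file" + path
            String providerURI = "jceks://file/tmp/credentials.jceks";
            Configuration conf = new Configuration(false);
            conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, providerURI);

            // returns null if neither a provider entry nor a config value exists
            char[] password = conf.getPassword("keystore.password");
            System.out.println(password == null ? "no entry" : "entry found");
        }
    }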

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java
deleted file mode 100755
index f94b4cf..0000000
--- a/webapp/src/main/java/org/apache/hadoop/metadata/web/errors/LoggingExceptionMapper.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.web.errors;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.Response;
-import javax.ws.rs.ext.ExceptionMapper;
-import java.util.concurrent.ThreadLocalRandom;
-
-/**
- * Exception mapper for Jersey.
- * @param <E> the exception type handled by this mapper
- */
-public class LoggingExceptionMapper<E extends Throwable> implements ExceptionMapper<E> {
-    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingExceptionMapper.class);
-
-    @Override
-    public Response toResponse(E exception) {
-        if (exception instanceof WebApplicationException) {
-            return ((WebApplicationException) exception).getResponse();
-        }
-
-        final long id = ThreadLocalRandom.current().nextLong();
-        logException(id, exception);
-        return Response.serverError()
-                .entity(formatErrorMessage(id, exception))
-                .build();
-    }
-
-    @SuppressWarnings("UnusedParameters")
-    protected String formatErrorMessage(long id, E exception) {
-        return String.format(
-            "There was an error processing your request. It has been logged (ID %016x).", id);
-    }
-
-    protected void logException(long id, E exception) {
-        LOGGER.error(formatLogMessage(id, exception), exception);
-    }
-
-    @SuppressWarnings("UnusedParameters")
-    protected String formatLogMessage(long id, Throwable exception) {
-        return String.format("Error handling a request: %016x", id);
-    }
-}
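
The mapper is generic, so Jersey needs a concrete, discoverable binding for a
specific exception type before it takes effect. One conventional wiring,
assuming classpath scanning picks up @Provider classes (the subclass name is
hypothetical):

    import javax.ws.rs.ext.Provider;

    // binding the type parameter to Throwable makes this the catch-all mapper:
    // any unmapped exception is logged with a correlation id and returned as a 500
    @Provider
    public class GenericExceptionMapper extends LoggingExceptionMapper<Throwable> {
    }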

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java
----------------------------------------------------------------------
diff --git a/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java b/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java
deleted file mode 100755
index 3ee86d5..0000000
--- a/webapp/src/main/java/org/apache/hadoop/metadata/web/filters/AuditFilter.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.metadata.web.filters;
-
-import com.google.inject.Singleton;
-import org.apache.hadoop.metadata.MetadataServiceClient;
-import org.apache.hadoop.metadata.web.util.DateTimeHelper;
-import org.apache.hadoop.metadata.web.util.Servlets;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.util.Date;
-import java.util.UUID;
-
-/**
- * This filter records audit information after processing each request and attaches
- * a UUID request id to the response so individual requests can be traced in the logs.
- */
-@Singleton
-public class AuditFilter implements Filter {
-
-    private static final Logger AUDIT_LOG = LoggerFactory.getLogger("AUDIT");
-    private static final Logger LOG = LoggerFactory.getLogger(AuditFilter.class);
-
-    @Override
-    public void init(FilterConfig filterConfig) throws ServletException {
-        LOG.info("AuditFilter initialization started");
-    }
-
-    @Override
-    public void doFilter(ServletRequest request,
-                         ServletResponse response,
-                         FilterChain filterChain) throws IOException, ServletException {
-        final String requestTimeISO8601 = DateTimeHelper.formatDateUTC(new Date());
-        final HttpServletRequest httpRequest = (HttpServletRequest) request;
-        final String requestId = UUID.randomUUID().toString();
-        final Thread currentThread = Thread.currentThread();
-        final String oldName = currentThread.getName();
-
-        try {
-            currentThread.setName(formatName(oldName, requestId));
-            recordAudit(httpRequest, requestTimeISO8601);
-            filterChain.doFilter(request, response);
-        } finally {
-            // put the request id into the response so users can trace logs for this request
-            ((HttpServletResponse) response).setHeader(MetadataServiceClient.REQUEST_ID, requestId);
-            currentThread.setName(oldName);
-        }
-    }
-
-    private String formatName(String oldName, String requestId) {
-        return oldName + " - " + requestId;
-    }
-
-    private void recordAudit(HttpServletRequest httpRequest, String whenISO8601) {
-        final String who = getUserFromRequest(httpRequest);
-        final String fromHost = httpRequest.getRemoteHost();
-        final String fromAddress = httpRequest.getRemoteAddr();
-        final String whatURL = Servlets.getRequestURL(httpRequest);
-        final String whatAddrs = httpRequest.getLocalAddr();
-
-        LOG.debug("Audit: {}/{} performed request {} ({}) at time {}",
-                who, fromAddress, whatURL, whatAddrs, whenISO9601);
-        audit(who, fromAddress, fromHost, whatURL, whatAddrs, whenISO9601);
-    }
-
-    private String getUserFromRequest(HttpServletRequest httpRequest) {
-        // look for the user in the request
-        final String userFromRequest = Servlets.getUserFromRequest(httpRequest);
-        return userFromRequest == null ? "UNKNOWN" : userFromRequest;
-    }
-
-    private void audit(String who, String fromAddress, String fromHost, String whatURL,
-                       String whatAddrs, String whenISO8601) {
-        AUDIT_LOG.info("Audit: {}/{}-{} performed request {} ({}) at time {}",
-                who, fromAddress, fromHost, whatURL, whatAddrs, whenISO8601);
-    }
-
-    @Override
-    public void destroy() {
-        // do nothing
-    }
-}
\ No newline at end of file
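
The @Singleton annotation suggests the filter is bound through Guice's servlet
support; as a standalone illustration, the same filter could also be mapped
programmatically with the Servlet 3.0 API (the listener class is hypothetical
and assumes it lives in the same package as AuditFilter):

    import javax.servlet.FilterRegistration;
    import javax.servlet.ServletContextEvent;
    import javax.servlet.ServletContextListener;

    public class AuditFilterRegistrar implements ServletContextListener {
        @Override
        public void contextInitialized(ServletContextEvent sce) {
            FilterRegistration.Dynamic registration =
                    sce.getServletContext().addFilter("audit", new AuditFilter());
            // null dispatcher types default to REQUEST; audit every URL in the app
            registration.addMappingForUrlPatterns(null, false, "/*");
        }

        @Override
        public void contextDestroyed(ServletContextEvent sce) {
            // nothing to release
        }
    }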

