atlas-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mad...@apache.org
Subject [19/25] incubator-atlas git commit: ATLAS-1898: initial commit of ODF
Date Wed, 28 Jun 2017 05:57:32 GMT
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationDeserializer.java
----------------------------------------------------------------------
diff --git a/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationDeserializer.java b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationDeserializer.java
new file mode 100755
index 0000000..6ea9c97
--- /dev/null
+++ b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationDeserializer.java
@@ -0,0 +1,165 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.json;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.atlas.odf.api.metadata.models.ClassificationAnnotation;
+import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation;
+import org.apache.atlas.odf.api.metadata.models.RelationshipAnnotation;
+
+/**
+ * The Jackson deserializer for Annotation objects
+ * 
+ *
+ */
+public class AnnotationDeserializer extends StdDeserializer<Annotation> {
+
+	private static final long serialVersionUID = -3143233438847937374L;
+	
+	Logger logger = Logger.getLogger(AnnotationDeserializer.class.getName());
+	
+	public AnnotationDeserializer() {
+		super(Annotation.class);
+	}
+
+	ClassLoader getClassLoader() {
+		return this.getClass().getClassLoader();
+	}
+	
+	@Override
+	public Annotation deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+		ObjectMapper jpom = ((ObjectMapper) jp.getCodec());
+		ObjectNode tree = jpom.readTree(jp);
+		String jsonString = tree.toString();
+		Annotation result = null;
+
+		Class<? extends Annotation> javaClass = null;
+		JsonNode javaClassNode = tree.get("javaClass");
+		if (javaClassNode == null) {
+			throw new IOException("Can not deserialize object since the javaClass attribute is missing: " + jsonString);
+		}
+		JsonNode jsonPropertiesNode = tree.get("jsonProperties");
+		String javaClassName = javaClassNode.asText();
+		if (javaClassName.equals(ProfilingAnnotation.class.getName())) {
+			javaClass = ProfilingAnnotation.class;
+		}
+		else if (javaClassName.equals(ClassificationAnnotation.class.getName())) {
+			javaClass = ClassificationAnnotation.class;
+		}
+		else if (javaClassName.equals(RelationshipAnnotation.class.getName())) {
+			javaClass = RelationshipAnnotation.class;
+		}
+		else {
+			try {
+				javaClass = (Class<? extends Annotation>) this.getClassLoader().loadClass(javaClassName);
+				if (jsonPropertiesNode != null && !jsonPropertiesNode.isNull()) { // unfold jsonProperties in case of specific annotations
+					JsonNode jsonPropertiesNodeUnfolded = null;
+					if (jsonPropertiesNode.isTextual()) {
+						jsonPropertiesNodeUnfolded = jpom.readTree(jsonPropertiesNode.asText());					
+					}
+					else {
+						jsonPropertiesNodeUnfolded = jsonPropertiesNode; 
+					}
+					JsonNode newJsonPropertiesNode = (JsonNode)jp.getCodec().createObjectNode();    // initialize new jsonProperties node
+					Field classFields[] = javaClass.getDeclaredFields();
+					HashSet<String> classFieldSet = new HashSet<String>();
+					for (Field f: classFields) {
+						f.setAccessible(true);
+						String fieldName = f.getName();
+						classFieldSet.add(fieldName);
+					}
+					Iterator<Entry<String,JsonNode>> jsonPropertiesFields = jsonPropertiesNodeUnfolded.fields();
+					while (jsonPropertiesFields.hasNext()) { 
+						Entry<String,JsonNode> field = jsonPropertiesFields.next();
+						String fieldName = field.getKey();
+						if (JSONUtils.annotationFields.contains(fieldName)) {
+							throw new IOException("Name conflict: Field name in jsonProperties matches predefined field [" + fieldName + "]");
+						}
+						JsonNode fieldValue = field.getValue();
+						if (classFieldSet.contains(fieldName)) {
+							tree.set(fieldName, fieldValue);							
+						}
+						else {
+							((ObjectNode)newJsonPropertiesNode).set(fieldName, field.getValue());							
+						}
+					}
+					tree.put("jsonProperties", newJsonPropertiesNode.textValue());
+				}
+			} catch (ClassNotFoundException exc) {
+				String msg = MessageFormat.format("Java class ''{0}'' could not be deserialized automatically (probably because it is not on the classpath)", javaClassName);
+				logger.warning(msg);
+				logger.log(Level.FINE, msg, exc);
+			}
+			if (javaClass == null) {
+				if (tree.get("profiledObject") != null) {   // class not found -> create as instance of corresponding 'unknown' types
+					javaClass = ProfilingAnnotation.class;
+				}
+				else if (tree.get("classifiedObject") != null) {
+					javaClass = ClassificationAnnotation.class;
+				}
+				else if (tree.get("relatedObjects") != null) {
+					javaClass = RelationshipAnnotation.class;
+				}
+				else { // malformed annotation
+					javaClass = Annotation.class;
+				}
+				if (jsonPropertiesNode == null) {
+					jsonPropertiesNode = (JsonNode)jp.getCodec().createObjectNode(); // initialize if not already present
+				}
+				Iterator<Entry<String,JsonNode>> fields = tree.fields();
+				ArrayList<String> fieldsToRemove = new ArrayList<String>();
+				try {
+					while (fields.hasNext()) {     // move all fields not present in the predefined annotation types
+						Entry<String,JsonNode> field = fields.next();   // to the string valued jsonProperties attribute
+						String fieldName = field.getKey();
+						if (!JSONUtils.annotationFields.contains(fieldName)) {
+							((ObjectNode)jsonPropertiesNode).set(fieldName, field.getValue());
+							fieldsToRemove.add(fieldName);
+						}
+					}
+					String jsonProperties = (jsonPropertiesNode.isTextual()) ? jsonPropertiesNode.textValue() : jsonPropertiesNode.toString();
+					tree.put("jsonProperties", jsonProperties); 
+					for (String fieldToRemove:fieldsToRemove) {  // remove fields not present in the predefined annotation types
+						tree.remove(fieldToRemove);
+					}
+				}
+				catch (Exception e) {
+					throw new IOException(e);
+				}
+			}
+			jsonString = tree.toString();				
+		}
+		result = jpom.readValue(jsonString, javaClass);
+		logger.log(Level.FINEST, "Annotation created. Original: {0}, deserialized annotation: {1}", new Object[]{ jsonString, JSONUtils.lazyJSONSerializer(result)});
+		return result;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationSerializer.java
----------------------------------------------------------------------
diff --git a/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationSerializer.java b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationSerializer.java
new file mode 100755
index 0000000..6fcc28e
--- /dev/null
+++ b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/AnnotationSerializer.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.json;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.logging.Logger;
+
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.wink.json4j.JSONException;
+import org.apache.wink.json4j.JSONObject;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+/**
+ * The Jackson serializer for Annotation objects
+ * 
+ *
+ */
public class AnnotationSerializer extends StdSerializer<Annotation> {

	// No-arg constructor required by Jackson; delegates with a null handled type.
	public AnnotationSerializer() {
		this(null);
	}
	
	public AnnotationSerializer(Class<Annotation> t) {
		super(t);
	}
	
	Logger logger = Logger.getLogger(AnnotationSerializer.class.getName());

	ClassLoader getClassLoader() {
		return this.getClass().getClassLoader();
	}
	
	// In the following jsonProperties is either already pre-populated (because we are serializing an instance of ProfilingAnnotation, ....
	// or it is created from all attributes not present in ProfilingAnnotation, or its ancestors (e.g. serializing an instance of ColumnAnalysisColumnAnnotation)
	// in the latter case jsonProperties is expected to be null
	
	/**
	 * Serializes an annotation. Fields declared on the predefined annotation types
	 * (per JSONUtils.annotationFields) are written as top-level JSON attributes;
	 * all other (subclass-specific) fields are merged with any pre-populated
	 * jsonProperties string and written as a nested "jsonProperties" object.
	 * When there are no subclass fields, jsonProperties is written as a plain
	 * (possibly null) string.
	 *
	 * @throws IOException if a field cannot be accessed reflectively or the
	 *         pre-populated jsonProperties string is not valid JSON
	 */
	@Override
	public void serialize(Annotation annot, JsonGenerator jg, SerializerProvider sp) throws IOException, JsonProcessingException {
		jg.writeStartObject();
		Class<?> cl = annot.getClass();
		// simple name/value holder for subclass fields deferred into jsonProperties
		class JSONPropField {
			String name;
			Object value;
			JSONPropField(String name, Object value) {this.name = name; this.value = value;}
		}
		ArrayList<JSONPropField> jsonPropFields = null;
		String jsonPropertiesValue = null;
		while (cl != Object.class) {   // process class hierarchy up to and including MetaDataObject.class
			Field fields[] = cl.getDeclaredFields();
			for (Field f: fields) {
				f.setAccessible(true);
				String fieldName = f.getName();
				try {
					Object fieldValue = f.get(annot);
					if (fieldName.equals("jsonProperties")) {
						// remember the pre-populated jsonProperties string for merging below
						jsonPropertiesValue = (String)fieldValue;
					}
					else if (JSONUtils.annotationFields.contains(fieldName)) {
						// predefined annotation field -> top-level attribute
						jg.writeFieldName(fieldName);
						jg.writeObject(fieldValue);							
					}
					else {
						// subclass-specific field -> deferred into jsonProperties
						if (jsonPropFields == null) jsonPropFields = new ArrayList<JSONPropField>();
						jsonPropFields.add(new JSONPropField(fieldName, fieldValue));
					}
				}
				catch (IllegalAccessException e) {
					throw new IOException(e);
				}
			}
			cl = cl.getSuperclass();
		}
		jg.writeFieldName("jsonProperties");
		if (jsonPropFields != null) {
			// subclass fields exist: emit jsonProperties as a JSON object merging the
			// pre-populated string (parsed first) with the collected subclass fields
			jg.writeStartObject();
			if (jsonPropertiesValue != null) {
				try {
					JSONObject jo = new JSONObject(jsonPropertiesValue);
					Iterator<String> it = jo.keys();
			         while(it.hasNext()) {
			             String key = it.next();
			             jg.writeFieldName(key);
			             jg.writeObject(jo.get(key));
					}					
				}
				catch (JSONException e) {
					throw new IOException(e);					
				}
			}
			for (JSONPropField jpf:jsonPropFields) {
				jg.writeFieldName(jpf.name);
				jg.writeObject(jpf.value);								
			}
			jg.writeEndObject();				
		}
		else {
			// no subclass fields: keep jsonProperties as the plain string value
			jg.writeString(jsonPropertiesValue);
		}
		jg.writeEndObject();
	}

}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-api/src/main/java/org/apache/atlas/odf/json/DefaultODFDeserializer.java
----------------------------------------------------------------------
diff --git a/odf/odf-api/src/main/java/org/apache/atlas/odf/json/DefaultODFDeserializer.java b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/DefaultODFDeserializer.java
new file mode 100755
index 0000000..d1ae80e
--- /dev/null
+++ b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/DefaultODFDeserializer.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.json;
+
+import java.io.IOException;
+import java.text.MessageFormat;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+
+public class DefaultODFDeserializer<T> extends StdDeserializer<T> {
+	private static final long serialVersionUID = 4895771352050172936L;
+
+	Logger logger = Logger.getLogger(DefaultODFDeserializer.class.getName());
+
+	Class<? extends T> defaultClass;
+
+	public DefaultODFDeserializer(Class<T> cl, Class<? extends T> defaultClass) {
+		super(cl);
+		this.defaultClass = defaultClass;
+	}
+
+	ClassLoader getClassLoader() {
+		return this.getClass().getClassLoader();
+	}
+
+	@Override
+	public T deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+		ObjectMapper jpom = ((ObjectMapper) jp.getCodec());
+		JsonNode tree = jpom.readTree(jp);
+		String jsonString = tree.toString();
+
+		Class<? extends T> javaClass = null;
+		String javaClassName = null;
+		try {
+			JsonNode javaClassNode = tree.get("javaClass");
+			javaClassName = javaClassNode.asText();
+			logger.log(Level.FINEST, "Trying to deserialize object of java class {0}", javaClassName);
+			javaClass = (Class<? extends T>) this.getClassLoader().loadClass(javaClassName);
+			if (javaClass != null) {
+				if (!javaClass.equals(this.handledType())) {
+					return jpom.readValue(jsonString, javaClass);
+				}
+			}
+		} catch (Exception exc) {
+			String msg = MessageFormat.format("Java class ''{0}'' could not be deserialized automatically (probably because it is not on the classpath)", javaClassName);
+			logger.warning(msg);
+			logger.log(Level.FINE, msg, exc);
+		}
+		return jpom.readValue(jsonString, defaultClass);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-api/src/main/java/org/apache/atlas/odf/json/JSONUtils.java
----------------------------------------------------------------------
diff --git a/odf/odf-api/src/main/java/org/apache/atlas/odf/json/JSONUtils.java b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/JSONUtils.java
new file mode 100755
index 0000000..fe9d592
--- /dev/null
+++ b/odf/odf-api/src/main/java/org/apache/atlas/odf/json/JSONUtils.java
@@ -0,0 +1,254 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.json;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+
+import org.apache.wink.json4j.JSONArray;
+import org.apache.wink.json4j.JSONException;
+import org.apache.wink.json4j.JSONObject;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import org.apache.atlas.odf.api.metadata.UnknownMetaDataObject;
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.atlas.odf.api.metadata.models.ClassificationAnnotation;
+import org.apache.atlas.odf.api.metadata.models.Connection;
+import org.apache.atlas.odf.api.metadata.models.ConnectionInfo;
+import org.apache.atlas.odf.api.metadata.models.DataSet;
+import org.apache.atlas.odf.api.metadata.models.DataStore;
+import org.apache.atlas.odf.api.metadata.models.MetaDataObject;
+import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation;
+import org.apache.atlas.odf.api.metadata.models.RelationshipAnnotation;
+import org.apache.atlas.odf.api.metadata.models.UnknownDataSet;
+import org.apache.atlas.odf.api.metadata.models.UnknownConnection;
+import org.apache.atlas.odf.api.metadata.models.UnknownConnectionInfo;
+import org.apache.atlas.odf.api.metadata.models.UnknownDataStore;
+
+public class JSONUtils {
+	
+	public static HashSet<String> annotationFields = new HashSet<String>();
+	
+	static {
+		for (Class<?> cl: new Class<?>[]{Annotation.class, ProfilingAnnotation.class, ClassificationAnnotation.class,RelationshipAnnotation.class}) {
+			while (cl != Object.class) {   // process class hierarchy up to and including MetaDataObject.class
+				Field fields[] = cl.getDeclaredFields();
+				for (Field f: fields) {
+					f.setAccessible(true);
+					annotationFields.add(f.getName());
+				}
+				cl = cl.getSuperclass();
+			}			
+		}
+	}
+
+
+
+	// reuse object mapper for performance
+	private static ObjectMapper om = null;
+
+	static {
+		om = new ObjectMapper();
+		Module mod = createDefaultObjectMapperModule();
+		om.registerModule(mod);
+	}
+
+	public static ObjectMapper getGlobalObjectMapper() {
+		return om;
+	}
+
+	static Module createDefaultObjectMapperModule() {
+		SimpleModule mod = new SimpleModule("ODF Jackson module", Version.unknownVersion());
+		mod.addDeserializer(Annotation.class, new AnnotationDeserializer());
+		mod.addDeserializer(MetaDataObject.class, new DefaultODFDeserializer<MetaDataObject>(MetaDataObject.class, UnknownMetaDataObject.class));
+		mod.addDeserializer(DataSet.class, new DefaultODFDeserializer<DataSet>(DataSet.class, UnknownDataSet.class));
+		mod.addDeserializer(DataStore.class, new DefaultODFDeserializer<DataStore>(DataStore.class, UnknownDataStore.class));
+		mod.addDeserializer(Connection.class, new DefaultODFDeserializer<Connection>(Connection.class, UnknownConnection.class));
+		mod.addDeserializer(ConnectionInfo.class, new DefaultODFDeserializer<ConnectionInfo>(ConnectionInfo.class, UnknownConnectionInfo.class));
+		
+		mod.addSerializer(Annotation.class, new AnnotationSerializer());
+		return mod;
+
+	}
+	
+	public static JSONObject toJSONObject(Object o) throws JSONException {
+		JSONObject result;
+		try {
+			result = new JSONObject(om.writeValueAsString(o));
+			if (o instanceof Annotation) {
+				Object jsonPropsObject = result.get("jsonProperties");
+				if (jsonPropsObject instanceof JSONObject) {    // the value of jsonProperties must be of type 'String'
+					result.put("jsonProperties", ((JSONObject)jsonPropsObject).toString());	
+				}
+			}
+		} catch (JsonProcessingException e) {
+			throw new JSONException(e);
+		}
+		return result;
+	}
+
+	public static String toJSON(Object o) throws JSONException {
+		String result;
+		try {
+			result = om.writeValueAsString(o);
+			if (o instanceof Annotation) {
+				JSONObject json = new JSONObject(result);
+				Object jsonPropsObject = json.get("jsonProperties");
+				if (jsonPropsObject instanceof JSONObject) {    // the value of jsonProperties must be of type 'String'
+					json.put("jsonProperties", ((JSONObject)jsonPropsObject).toString());	
+					result = json.toString();
+				}
+			}
+		} catch (JsonProcessingException e) {
+			throw new JSONException(e);
+		}
+		return result;
+	}
+
+	public static <T> List<T> fromJSONList(String s, Class<T> cl) throws JSONException {
+		JSONArray ar = new JSONArray(s);
+		List<T> result = new ArrayList<>();
+		for (Object o : ar) {
+			JSONObject jo = (JSONObject) o;
+			T t = (T) fromJSON(jo.write(), cl);
+			result.add(t);
+		}
+		return result;
+
+	}
+
+	public static <T> List<T> fromJSONList(InputStream is, Class<T> cl) throws JSONException {
+		JSONArray ar = new JSONArray(is);
+		List<T> result = new ArrayList<>();
+		for (Object o : ar) {
+			JSONObject jo = (JSONObject) o;
+			T t = (T) fromJSON(jo.write(), cl);
+			result.add(t);
+		}
+		return result;
+	}
+
+	public static <T> T fromJSON(String s, Class<T> cl) throws JSONException {
+		T result = null;
+		try {
+			result = om.readValue(s, cl);
+		} catch (JsonProcessingException exc) {
+			// propagate JSON exception
+			throw new JSONException(exc);
+		} catch (IOException e) {
+			throw new RuntimeException(e);
+		}
+
+		return result;
+	}
+
+	public static <T> T fromJSON(InputStream is, Class<T> cl) throws JSONException {
+		return fromJSON(getInputStreamAsString(is, "UTF-8"), cl);
+	}
+
+	public static <T> T readJSONObjectFromFileInClasspath(Class<T> cl, String pathToFile, ClassLoader classLoader) {
+		if (classLoader == null) {
+			// use current classloader if not provided
+			classLoader = JSONUtils.class.getClassLoader();
+		}
+		InputStream is = classLoader.getResourceAsStream(pathToFile);
+		T result = null;
+		try {
+			result = om.readValue(is, cl);
+		} catch (IOException e) {
+			// assume that this is a severe error since the provided JSONs should be correct
+			throw new RuntimeException(e);
+		}
+
+		return result;
+	}
+
+	public static <T> T cloneJSONObject(T obj) throws JSONException {
+		// special case: use Annotation.class in case obj is an annotation subclass to ensure that the annotation deserializer is used
+		if (Annotation.class.isAssignableFrom(obj.getClass())) {
+			return (T) fromJSON(toJSON(obj), Annotation.class);
+		}
+		return fromJSON(toJSON(obj), (Class<T>) obj.getClass());
+	}
+
+	
+	public static void mergeJSONObjects(JSONObject source, JSONObject target) {
+		if (source != null && target != null) {
+			target.putAll(source);
+		}
+	}
+
+	// use this method, e.g., if you want to use JSON objects in log / trace messages
+	// and want to do serialization only if tracing is on
+	public static Object lazyJSONSerializer(final Object jacksonObject) {
+		return new Object() {
+
+			@Override
+			public String toString() {
+				try {
+					return toJSON(jacksonObject);
+				} catch (JSONException e) {
+					return e.getMessage();
+				}
+			}
+
+		};
+	}
+
+	public static Object jsonObject4Log(final JSONObject obj) {
+		return new Object() {
+
+			@Override
+			public String toString() {
+				try {
+					return obj.write();
+				} catch (Exception e) {
+					return e.getMessage();
+				}
+			}
+
+		};
+	}
+
+	public static String getInputStreamAsString(InputStream is, String encoding) {
+		try {
+			final int n = 2048;
+			byte[] b = new byte[0];
+			byte[] temp = new byte[n];
+			int bytesRead;
+			while ((bytesRead = is.read(temp)) != -1) {
+				byte[] newB = new byte[b.length + bytesRead];
+				System.arraycopy(b, 0, newB, 0, b.length);
+				System.arraycopy(temp, 0, newB, b.length, bytesRead);
+				b = newB;
+			}
+			String s = new String(b, encoding);
+			return s;
+		} catch (IOException exc) {
+			throw new RuntimeException(exc);
+		}
+	}
+	
+	public static <T, S> T convert(S source, Class<T> targetClass) throws JSONException {
+		return fromJSON(toJSON(source), targetClass);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-api/src/test/java/org/apache/atlas/odf/test/json/ODFJSONSerializationTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-api/src/test/java/org/apache/atlas/odf/test/json/ODFJSONSerializationTest.java b/odf/odf-api/src/test/java/org/apache/atlas/odf/test/json/ODFJSONSerializationTest.java
new file mode 100755
index 0000000..da8d3af
--- /dev/null
+++ b/odf/odf-api/src/test/java/org/apache/atlas/odf/test/json/ODFJSONSerializationTest.java
@@ -0,0 +1,406 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.test.json;
+
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.UUID;
+import java.util.logging.Logger;
+
+import org.apache.atlas.odf.api.metadata.InvalidReference;
+import org.apache.atlas.odf.api.metadata.StoredMetaDataObject;
+import org.apache.wink.json4j.JSON;
+import org.apache.wink.json4j.JSONException;
+import org.apache.wink.json4j.JSONObject;
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceEndpoint;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceProperties;
+import org.apache.atlas.odf.api.metadata.MetaDataObjectReference;
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.atlas.odf.api.metadata.models.ClassificationAnnotation;
+import org.apache.atlas.odf.api.metadata.models.JDBCConnection;
+import org.apache.atlas.odf.api.metadata.models.JDBCConnectionInfo;
+import org.apache.atlas.odf.api.metadata.models.MetaDataCache;
+import org.apache.atlas.odf.api.metadata.models.MetaDataObject;
+import org.apache.atlas.odf.api.metadata.models.Column;
+import org.apache.atlas.odf.api.metadata.models.Connection;
+import org.apache.atlas.odf.api.metadata.models.ConnectionInfo;
+import org.apache.atlas.odf.api.metadata.models.DataFile;
+import org.apache.atlas.odf.api.metadata.models.DataSet;
+import org.apache.atlas.odf.api.metadata.models.DataStore;
+import org.apache.atlas.odf.api.metadata.models.Database;
+import org.apache.atlas.odf.api.metadata.models.Table;
+import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation;
+import org.apache.atlas.odf.api.metadata.models.RelationshipAnnotation;
+import org.apache.atlas.odf.api.metadata.models.UnknownDataSet;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class ODFJSONSerializationTest {
+
+	Logger logger = Logger.getLogger(ODFJSONSerializationTest.class.getName());
+
+	MetaDataObjectReference createNewRef() {
+		MetaDataObjectReference ref = new MetaDataObjectReference();
+		ref.setId(UUID.randomUUID().toString());
+		ref.setRepositoryId("odftestrepositoryid");
+		return ref;
+	}
+
+	static class NewAnnotation extends ProfilingAnnotation {
+		String newProp;
+
+		public String getNewProp() {
+			return newProp;
+		}
+
+		public void setNewProp(String newProp) {
+			this.newProp = newProp;
+		}
+
+	}
+
+	List<MetaDataObject> createTestObjects() throws JSONException, ParseException {
+		List<MetaDataObject> testObjects = new ArrayList<>();
+
+		Column col = new Column();
+		MetaDataObjectReference colref = createNewRef();
+		col.setReference(colref);
+		col.setName("col1");
+		col.setDescription("column desc");
+		col.setDataType("theDatatype");
+
+		Table t = new Table();
+		MetaDataObjectReference tableRef = createNewRef();
+		t.setReference(tableRef);
+		t.setName("Table");
+		t.setDescription("table desc");
+
+		Database db = new Database();
+		MetaDataObjectReference dbref = createNewRef();
+		db.setReference(dbref);
+		db.setName("DB");
+		db.setDescription("db description");
+
+		JDBCConnection jdbcConn = new JDBCConnection();
+		MetaDataObjectReference jdbcConnRef = createNewRef();
+		jdbcConn.setReference(jdbcConnRef);
+		jdbcConn.setName("jdbc connection");
+		jdbcConn.setUser("theUser");
+		jdbcConn.setPassword("thePassword");
+		jdbcConn.setJdbcConnectionString("jdbc:db2:localhost:50000/SAMPLE");
+		db.setConnections(Collections.singletonList(jdbcConnRef));
+
+		ProfilingAnnotation profAnnot1 = new ProfilingAnnotation();
+		MetaDataObjectReference uaRef = createNewRef();
+		profAnnot1.setReference(uaRef);
+		profAnnot1.setProfiledObject(jdbcConnRef);
+		profAnnot1.setJsonProperties("{\"a\": \"b\"}");
+
+		ProfilingAnnotation profAnnot2 = new ProfilingAnnotation();
+		MetaDataObjectReference mdoRef = createNewRef();
+		profAnnot2.setReference(mdoRef);
+		profAnnot2.setProfiledObject(jdbcConnRef);
+		profAnnot2.setJsonProperties("{\"a\": \"b\"}");
+
+		NewAnnotation newAnnot = new NewAnnotation();
+		MetaDataObjectReference newAnnotRef = createNewRef();
+		newAnnot.setReference(newAnnotRef);
+
+		// a generic DataSet
+		UnknownDataSet ds = new UnknownDataSet();
+		ds.setName("generic data set");
+		ds.setReference(createNewRef());
+
+		MetaDataObject[] mdos = new MetaDataObject[] { db, jdbcConn, t, col, profAnnot1, profAnnot2, newAnnot, ds };
+		testObjects.addAll(Arrays.asList(mdos));
+		return testObjects;
+	}
+
+	@Test
+	public void testSerialization() throws Exception {
+		List<MetaDataObject> testObjects = createTestObjects();
+
+		for (MetaDataObject testObject : testObjects) {
+			Class<?> cl = testObject.getClass();
+			logger.info("Testing serialization / deserialization of object: " + testObject + " of class: " + cl);
+
+			String json = JSONUtils.toJSON(testObject);
+			logger.info("Serialized json: " + json);
+
+			Object objStronglyTypedClass;
+			if (testObject instanceof Annotation) { // special treatment for Annotations -> 2nd arg of fromJSON() needs to be Annotation.class
+				objStronglyTypedClass = JSONUtils.fromJSON(json, Annotation.class);
+				Assert.assertEquals(cl, objStronglyTypedClass.getClass());
+			}
+			else {
+				 objStronglyTypedClass = JSONUtils.fromJSON(json, cl);
+				 Assert.assertEquals(cl, objStronglyTypedClass.getClass());
+			}
+			String json1 = JSONUtils.toJSON(objStronglyTypedClass);
+			Assert.assertEquals(json, json1);
+
+			Object objWithGenericClass = JSONUtils.fromJSON(json, MetaDataObject.class);
+
+			Assert.assertEquals(cl, objWithGenericClass.getClass());
+			String json2 = JSONUtils.toJSON(objWithGenericClass);
+			Assert.assertEquals(json, json2);
+
+			Class<?> intermediateClasses[] = new Class<?>[] { MetaDataObject.class, DataSet.class, DataStore.class, Connection.class };
+
+			for (Class<?> intermediateClass : intermediateClasses) {
+				logger.info("Checking intermediate class: " + intermediateClass);
+				if (intermediateClass.isAssignableFrom(cl)) {
+
+					Object intermediateObject = JSONUtils.fromJSON(json, intermediateClass);
+					logger.info("Deserialized object: " + intermediateObject);
+					logger.info("Deserialized object class: " + intermediateObject.getClass());
+
+					Assert.assertTrue(intermediateClass.isAssignableFrom(intermediateObject.getClass()));
+					Assert.assertEquals(cl, intermediateObject.getClass());
+					String json3 = JSONUtils.toJSON(intermediateObject);
+					Assert.assertEquals(json, json3);
+				}
+			}
+
+		}
+	}
+
+	/**
+	 * Test serialization of an Annotation (subclass) which has both, its own fields (to be mapped to jsonProperties) and
+	 * a non-empty jsonProperties attribute holding the string representation of a Json object.
+	 */
+
+	@Test
+	public void testJsonPropertiesMerge() {
+		NewAnnotation annot = new NewAnnotation();
+		MetaDataObjectReference ref = new MetaDataObjectReference();
+		ref.setId("id");
+		ref.setRepositoryId("repoid");
+		ref.setUrl("http://url");
+		annot.setProfiledObject(ref);
+		annot.setNewProp("newPropValue");
+		annot.setJsonProperties("{\"oldProp\":\"oldPropValue\"}");
+		JSONObject jo = null;
+		try {
+			jo = JSONUtils.toJSONObject(annot);
+			String jsonPropertiesString = jo.getString("jsonProperties");
+			JSONObject jo2 = new JSONObject(jsonPropertiesString);
+			Assert.assertEquals("oldPropValue", jo2.get("oldProp"));
+			Assert.assertEquals("newPropValue", jo2.get("newProp"));
+		}
+		catch (JSONException e) {
+			e.printStackTrace();
+		}
+	}
+
+	// Expected serialized form of a NewAnnotation: the subclass field ("newProp")
+	// and the pre-existing jsonProperties content ("oldProp") are merged into a
+	// single text-encoded JSON object stored in the "jsonProperties" attribute.
+	// Used by testJsonPropertiesUnmerge below.
+	final static private String MERGED_JSON = "{" +
+			"\"analysisRun\":null," +
+			"\"summary\":null," +
+			"\"reference\":null," +
+			"\"originRef\":null," +
+			"\"replicaRefs\":null," +
+			"\"javaClass\":\"org.apache.atlas.odf.json.test.ODFJSONSerializationTest$NewAnnotation\"," +
+			"\"jsonProperties\":\"{" +
+			   "\\\"newProp\\\":\\\"newPropValue\\\"," +
+			   "\\\"oldProp\\\":\\\"oldPropValue\\\"" +
+			   "}\"," +
+			"\"name\":null," +
+			"\"annotationType\":\"NewAnnotation\"," +
+			"\"description\":null," +
+			"\"profiledObject\":{" +
+			   "\"repositoryId\":\"repoid\"," +
+			   "\"id\":\"id\"," +
+			   "\"url\":\"http://url\"}" +
+	        "}";
+
+	/**
+	 * Test deserialization of a Json object which has fields in its jsonProperties that can not be mapped to native fields of
+	 * the target class (= value of javaClass field). These and only these remain as fields in the text encoded Json object
+	 * stored in the jsonProperties field of the result.
+	 */
+
+	// NOTE(review): disabled via @Ignore without a stated reason -- document why it
+	// is disabled, or re-enable it once the un-merge behavior is settled.
+	@Test
+	@Ignore
+	public void testJsonPropertiesUnmerge() throws Exception {
+		logger.info("Deserializing JSON: " + MERGED_JSON);
+		Annotation annot = JSONUtils.fromJSON(MERGED_JSON, Annotation.class);
+		Assert.assertTrue(annot instanceof NewAnnotation);
+		NewAnnotation newAnnot = (NewAnnotation) annot;
+		// the mappable field must have been moved into the native property ...
+		Assert.assertEquals("newPropValue", newAnnot.getNewProp());
+		JSONObject props = (JSONObject) JSON.parse(annot.getJsonProperties());
+
+		// ... while the unmappable field remains in the jsonProperties JSON text
+		Assert.assertNotNull(props.get("oldProp"));
+		Assert.assertEquals("oldPropValue", props.get("oldProp"));
+
+		// serializing again must reproduce the original merged JSON exactly
+		JSONObject jo = JSONUtils.toJSONObject(annot);
+		Assert.assertEquals(MERGED_JSON, jo.toString());
+	}
+
+	// Prototype JSON for a profiling annotation subtype; testSimpleAnnotationPrototypeCreation
+	// expects it to deserialize to a ProfilingAnnotation.
+	final private static String PROFILING_ANNOTATION_JSON = "{" +
+			"\"profiledObject\": null," +
+			"\"annotationType\": \"MySubType1\"," +
+			"\"javaClass\": \"org.apache.atlas.odf.core.integrationtest.metadata.atlas.MySubType1\"," +
+			"\"analysisRun\": \"bla\"," +
+			"\"newProp1\": 42," +
+			"\"newProp2\": \"hi\"," +
+			"\"newProp3\": \"hello\"" +
+		"}";
+
+	// Prototype JSON for a classification annotation subtype (expected: ClassificationAnnotation).
+	final private static String CLASSIFICATION_ANNOTATION_JSON = "{" +
+			"\"classifyingObject\": null," +
+			"\"classifiedObject\": null," +
+			"\"annotationType\": \"MySubType2\"," +
+			"\"javaClass\": \"org.apache.atlas.odf.core.integrationtest.metadata.atlas.MySubType2\"," +
+			"\"analysisRun\": \"bla\"," +
+			"\"newProp1\": 42," +
+			"\"newProp2\": \"hi\"," +
+			"\"newProp3\": \"hello\"" +
+		"}";
+
+	// Prototype JSON for a relationship annotation subtype (expected: RelationshipAnnotation).
+	final private static String RELATIONSHIP_ANNOTATION_JSON = "{" +
+			"\"relatedObjects\": null," +
+			"\"annotationType\": \"MySubType3\"," +
+			"\"javaClass\": \"org.apache.atlas.odf.core.integrationtest.metadata.atlas.MySubType3\"," +
+			"\"analysisRun\": \"bla\"," +
+			"\"newProp1\": 42," +
+			"\"newProp2\": \"hi\"," +
+			"\"newProp3\": \"hello\"" +
+		"}";
+
+	 /**
+	  *  Replacement for AtlasAnnotationTypeDefinitionCreatTest
+	  */
+
+	@Test
+	public void testSimpleAnnotationPrototypeCreation() throws Exception {
+		// Each JSON prototype must deserialize to an instance of the Annotation
+		// category it belongs to (profiling / classification / relationship).
+		logger.info("Annotation string: " + PROFILING_ANNOTATION_JSON);
+		Annotation annot = JSONUtils.fromJSON(PROFILING_ANNOTATION_JSON, Annotation.class);
+		// Fix: log the parsed annotation; previously the input JSON string was
+		// re-logged a second time, making the second log line redundant.
+		logger.info("Annotation: " + annot);
+		Assert.assertTrue(annot instanceof ProfilingAnnotation);
+
+		logger.info("Annotation string: " + CLASSIFICATION_ANNOTATION_JSON);
+		annot = JSONUtils.fromJSON(CLASSIFICATION_ANNOTATION_JSON, Annotation.class);
+		logger.info("Annotation: " + annot);
+		Assert.assertTrue(annot instanceof ClassificationAnnotation);
+
+		logger.info("Annotation string: " + RELATIONSHIP_ANNOTATION_JSON);
+		annot = JSONUtils.fromJSON(RELATIONSHIP_ANNOTATION_JSON, Annotation.class);
+		logger.info("Annotation: " + annot);
+		Assert.assertTrue(annot instanceof RelationshipAnnotation);
+	}
+
+	@Test
+	public void testUnretrievedReference() throws Exception {
+		String repoId = "SomeRepoId";
+
+		// A column carrying an invalid reference must survive a JSON round trip
+		// with the reference still recognized as invalid.
+		Column column = new Column();
+		column.setName("name");
+		column.setReference(InvalidReference.createInvalidReference(repoId));
+		String columnJson = JSONUtils.toJSON(column);
+		Column deserializedColumn = JSONUtils.fromJSON(columnJson, Column.class);
+		Assert.assertTrue(InvalidReference.isInvalidRef(deserializedColumn.getReference()));
+
+		// Same for an invalid reference LIST on a database's connections.
+		Database database = new Database();
+		database.setName("database");
+		JSONUtils.toJSON(database); // serialize once without connections; result intentionally unused
+		database.setConnections(InvalidReference.createInvalidReferenceList(repoId));
+		Database deserializedDatabase = JSONUtils.fromJSON(JSONUtils.toJSON(database), Database.class);
+		Assert.assertTrue(InvalidReference.isInvalidRefList(deserializedDatabase.getConnections()));
+	}
+
+	/**
+	 * A DiscoveryServiceProperties object with an endpoint carrying an extension
+	 * key/value pair must survive a JSON round trip.
+	 */
+	@Test
+	public void testExtensibleDiscoveryServiceEndpoints() throws Exception {
+		DiscoveryServiceProperties dsprops = new DiscoveryServiceProperties();
+		dsprops.setId("theid");
+		dsprops.setName("thename");
+
+		DiscoveryServiceEndpoint ep = new DiscoveryServiceEndpoint();
+		ep.setRuntimeName("newruntime");
+		ep.set("someKey", "someValue"); // extension property beyond the declared fields
+		dsprops.setEndpoint(ep);
+
+		String dspropsJSON = JSONUtils.toJSON(dsprops);
+		logger.info("Discovery service props JSON: " + dspropsJSON);
+
+		DiscoveryServiceProperties deserProps = JSONUtils.fromJSON(dspropsJSON, DiscoveryServiceProperties.class);
+		Assert.assertNotNull(deserProps);
+		// Fix: assert on the DESERIALIZED object; the original code asserted on
+		// dsprops, which trivially holds and would mask round-trip bugs.
+		Assert.assertEquals("theid", deserProps.getId());
+		Assert.assertEquals("thename", deserProps.getName());
+		Assert.assertNotNull(deserProps.getEndpoint());
+		Assert.assertEquals(DiscoveryServiceEndpoint.class, deserProps.getEndpoint().getClass());
+		DiscoveryServiceEndpoint deserEP = (DiscoveryServiceEndpoint) deserProps.getEndpoint();
+		Assert.assertEquals("newruntime", deserEP.getRuntimeName());
+		Assert.assertEquals("someValue", deserEP.get().get("someKey"));
+	}
+
+	@Test
+	public void testMetaDataCache() {
+		MetaDataCache cache = new MetaDataCache();
+
+		MetaDataObjectReference ref = new MetaDataObjectReference();
+		ref.setId("id");
+		ref.setRepositoryId("repositoryId");
+		DataFile dataFile = new DataFile();
+		dataFile.setName("dataFile");
+		dataFile.setEncoding("encoding");
+		dataFile.setReference(ref);
+
+		List<MetaDataObjectReference> refList = new ArrayList<MetaDataObjectReference>();
+		refList.add(ref);
+		StoredMetaDataObject storedObject = new StoredMetaDataObject(dataFile);
+		HashMap<String, List<MetaDataObjectReference>> referenceMap = new HashMap<String, List<MetaDataObjectReference>>();
+		referenceMap.put("id", refList);
+		storedObject.setReferencesMap(referenceMap);
+		List<StoredMetaDataObject> metaDataObjects = new ArrayList<StoredMetaDataObject>();
+		metaDataObjects.add(storedObject);
+		cache.setMetaDataObjects(metaDataObjects);
+
+		Connection con = new JDBCConnection();
+		con.setName("connection");
+		JDBCConnectionInfo conInfo = new JDBCConnectionInfo();
+		conInfo.setConnections(Collections.singletonList(con));
+		conInfo.setAssetReference(ref);
+		conInfo.setTableName("tableName");
+		List<ConnectionInfo> connectionInfoObjects = new ArrayList<ConnectionInfo>();
+		connectionInfoObjects.add(conInfo);
+		cache.setConnectionInfoObjects(connectionInfoObjects);
+
+		try {
+			String serializedCache = JSONUtils.toJSON(cache);
+			logger.info("Serialized metadata cache JSON: " + serializedCache);
+			MetaDataCache deserializedCache = JSONUtils.fromJSON(serializedCache, MetaDataCache.class);
+			Assert.assertEquals("dataFile", deserializedCache.getMetaDataObjects().get(0).getMetaDataObject().getName());
+			Assert.assertEquals("encoding", ((DataFile) deserializedCache.getMetaDataObjects().get(0).getMetaDataObject()).getEncoding());
+			Assert.assertEquals("connection", deserializedCache.getConnectionInfoObjects().get(0).getConnections().get(0).getName());
+			Assert.assertEquals("tableName", ((JDBCConnectionInfo) deserializedCache.getConnectionInfoObjects().get(0)).getTableName());
+			Assert.assertEquals("repositoryId", deserializedCache.getMetaDataObjects().get(0).getReferenceMap().get("id").get(0).getRepositoryId());
+		}
+		catch (JSONException e) {
+			e.printStackTrace();
+		}
+	}
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/.gitignore
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/.gitignore b/odf/odf-archetype-discoveryservice/.gitignore
new file mode 100755
index 0000000..67c976b
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/.gitignore
@@ -0,0 +1,17 @@
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+.settings
+target
+.classpath
+.project

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/pom.xml
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/pom.xml b/odf/odf-archetype-discoveryservice/pom.xml
new file mode 100755
index 0000000..c9c2aed
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.apache.atlas.odf</groupId>
+		<artifactId>odf</artifactId>
+		<version>1.2.0-SNAPSHOT</version>
+	</parent>
+	<artifactId>odf-archetype-discoveryservice</artifactId>
+	<packaging>maven-archetype</packaging>
+
+	<description>The ODF maven archetype for discovery services</description>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<build>
+		<extensions>
+			<extension>
+				<groupId>org.apache.maven.archetype</groupId>
+				<artifactId>archetype-packaging</artifactId>
+				<version>2.4</version>
+			</extension>
+		</extensions>
+
+		<pluginManagement>
+			<plugins>
+				<plugin>
+					<artifactId>maven-archetype-plugin</artifactId>
+					<version>2.4</version>
+				</plugin>
+			</plugins>
+		</pluginManagement>
+	</build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/META-INF/maven/archetype.xml
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/META-INF/maven/archetype.xml b/odf/odf-archetype-discoveryservice/src/main/resources/META-INF/maven/archetype.xml
new file mode 100755
index 0000000..9848e46
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/META-INF/maven/archetype.xml
@@ -0,0 +1,27 @@
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<archetype>
+  <id>odf-archetype-discoveryservice-jar</id>
+  <sources>
+    <source>src/main/java/MyAnnotation.java</source>
+    <source>src/main/java/MyDiscoveryService.java</source>
+  </sources>
+  <resources>
+    <resource>src/main/resources/META-INF/odf/odf-services.json</resource>
+  </resources>
+  <testSources>
+    <source>src/test/java/MyDiscoveryServiceTest.java</source>
+  </testSources>
+</archetype>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/pom.xml
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/pom.xml b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/pom.xml
new file mode 100755
index 0000000..0ada9e8
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/pom.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>${groupId}</groupId>
+	<artifactId>${artifactId}</artifactId>
+	<version>${version}</version>
+	<packaging>jar</packaging>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-api</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+		</dependency>
+
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.12</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyAnnotation.java
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyAnnotation.java b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyAnnotation.java
new file mode 100755
index 0000000..8ce0d2f
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyAnnotation.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package ${package};
+
+import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation;
+
+/**
+ * Sample annotation type for the generated discovery service.
+ * Extends {@link ProfilingAnnotation} with a single custom string property.
+ */
+public class MyAnnotation extends ProfilingAnnotation {
+
+	// Example custom property carried by this annotation.
+	private String myProperty;
+
+	public String getMyProperty() {
+		return myProperty;
+	}
+
+	public void setMyProperty(String myValue) {
+		this.myProperty = myValue;
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyDiscoveryService.java
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyDiscoveryService.java b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyDiscoveryService.java
new file mode 100755
index 0000000..a07ccdb
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/java/MyDiscoveryService.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package ${package};
+
+import java.util.Collections;
+import java.util.Date;
+
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceRequest;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceResponse.ResponseCode;
+import org.apache.atlas.odf.api.discoveryservice.SyncDiscoveryServiceBase;
+import org.apache.atlas.odf.api.discoveryservice.sync.DiscoveryServiceSyncResponse;
+
+/**
+ * A simple synchronous discovery service that creates one annotation for the data set it analyzes.
+ *
+ */
+public class MyDiscoveryService extends SyncDiscoveryServiceBase {
+
+	@Override
+	public DiscoveryServiceSyncResponse runAnalysis(DiscoveryServiceRequest request) {
+		// 1. create an annotation that annotates the data set object passed in the request
+		MyAnnotation annotation = new MyAnnotation();
+		annotation.setProfiledObject(request.getDataSetContainer().getDataSet().getReference());
+		// set the annotation's custom property "myProperty" to some string
+		// (the comment previously referred to a non-existent "tutorialProperty")
+		annotation.setMyProperty("My property was created on " + new Date());
+
+		// 2. create a response with our annotation created above
+		return createSyncResponse( //
+				ResponseCode.OK, // Everything works OK
+				"Everything worked", // human-readable message
+				Collections.singletonList(annotation) // new annotations
+		);
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/resources/META-INF/odf/odf-services.json
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/resources/META-INF/odf/odf-services.json b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/resources/META-INF/odf/odf-services.json
new file mode 100755
index 0000000..e90ce7b
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/main/resources/META-INF/odf/odf-services.json
@@ -0,0 +1,11 @@
+[
+  {
+	"id": "${groupId}.${artifactId}.MyDiscoveryService",
+	"name": "My service",
+	"description": "My service creates my annotation for a data set",
+	"endpoint": {
+		"runtimeName": "Java",
+		"className": "${package}.MyDiscoveryService"
+	}
+  }
+]

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/test/java/MyDiscoveryServiceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/test/java/MyDiscoveryServiceTest.java b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/test/java/MyDiscoveryServiceTest.java
new file mode 100755
index 0000000..bc585d2
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/main/resources/archetype-resources/src/test/java/MyDiscoveryServiceTest.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ package ${package};
+
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Unit test template for discovery service
+ */
+public class MyDiscoveryServiceTest {
+
+	// Placeholder test so a freshly generated project builds with a passing test;
+	// replace with real tests for your discovery service.
+	@Test
+	public void test() throws Exception {
+		Assert.assertTrue(true);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/archetype.properties
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/archetype.properties b/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/archetype.properties
new file mode 100755
index 0000000..9fbb593
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/archetype.properties
@@ -0,0 +1,23 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+archetype.groupId=org.apache.atlas.odf
+archetype.artifactId=odf-archetype-discoveryservice-jar
+archetype.version=1.2.0-SNAPSHOT
+
+groupId=jg1
+artifactId=ja1
+version=0.1
+package=odf.j.p1.p2
+

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/goal.txt
----------------------------------------------------------------------
diff --git a/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/goal.txt b/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/goal.txt
new file mode 100755
index 0000000..3cb5141
--- /dev/null
+++ b/odf/odf-archetype-discoveryservice/src/test/resources/projects/it1/goal.txt
@@ -0,0 +1,14 @@
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+clean verify

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-atlas/.gitignore
----------------------------------------------------------------------
diff --git a/odf/odf-atlas/.gitignore b/odf/odf-atlas/.gitignore
new file mode 100755
index 0000000..174a0a7
--- /dev/null
+++ b/odf/odf-atlas/.gitignore
@@ -0,0 +1,20 @@
+#
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+.settings
+target
+.classpath
+.project
+.factorypath
+.DS_Store
+derby.log

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-atlas/atlasconfig/jetty-web.xml
----------------------------------------------------------------------
diff --git a/odf/odf-atlas/atlasconfig/jetty-web.xml b/odf/odf-atlas/atlasconfig/jetty-web.xml
new file mode 100755
index 0000000..66ec730
--- /dev/null
+++ b/odf/odf-atlas/atlasconfig/jetty-web.xml
@@ -0,0 +1,24 @@
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<Configure class="org.eclipse.jetty.webapp.WebAppContext">
+	<Get name="securityHandler">
+		<Set name="loginService">
+			<New class="org.eclipse.jetty.security.HashLoginService">
+				<Set name="name">ODF Realm</Set>
+				<Set name="config"><SystemProperty name="atlas.home" default="."/>/conf/realm.properties</Set>
+			</New>
+		</Set>
+	</Get>
+</Configure>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-atlas/atlasconfig/realm.properties
----------------------------------------------------------------------
diff --git a/odf/odf-atlas/atlasconfig/realm.properties b/odf/odf-atlas/atlasconfig/realm.properties
new file mode 100755
index 0000000..0d57c4a
--- /dev/null
+++ b/odf/odf-atlas/atlasconfig/realm.properties
@@ -0,0 +1,24 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Credentials for Atlas basic authentication
+#
+# Format:
+# <username>: <password>[,<rolename> ...]
+#
+# Password is stored in obfuscated format.
+# Re-generate password using the org.eclipse.jetty.util.security.Password class in the jetty lib folder.
+# Example:
+# cd jetty-distribution-<version>/lib
+# java -cp jetty-util-<version>.jar org.eclipse.jetty.util.security.Password <plain password>
+atlas: OBF:1v1p1s3m1w1s1wtw1u3019q71u2a1wui1w1q1s3g1v2p,user

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-atlas/build_atlas.xml
----------------------------------------------------------------------
diff --git a/odf/odf-atlas/build_atlas.xml b/odf/odf-atlas/build_atlas.xml
new file mode 100755
index 0000000..8b6de87
--- /dev/null
+++ b/odf/odf-atlas/build_atlas.xml
@@ -0,0 +1,265 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<project name="build_atlas">
+	<dirname property="script.basedir" file="${ant.file.build_atlas}" />
+	<property name="atlas-dir" value="apache-atlas-${atlas.version}" />
+	<!-- Properties provided by pom.xml: -->
+	<!-- <property name="atlas-unpack-dir" value="" /> -->
+	<!-- <property name="atlas.version" value="" /> -->
+
+	<property name="atlas-archive" value="/tmp/${atlas-dir}-bin.zip" />
+
+	<condition property="is-windows">
+		<os family="windows">
+		</os>
+	</condition>
+
+	<condition property="is-unix">
+		<os family="unix">
+		</os>
+	</condition>
+
+	<condition property="is-mac">
+		<os family="mac">
+		</os>
+	</condition>
+
+	<condition property="atlas-zip-not-found">
+		<not>
+			<available file="${atlas-archive}">
+			</available>
+		</not>
+	</condition>
+
+	<condition property="atlas-unpacked">
+	   <available file="${atlas-unpack-dir}/${atlas-dir}/bin/atlas_start.py"/>
+    </condition>
+
+	<condition property="atlas-running">
+		<available file="${atlas-unpack-dir}/${atlas-dir}/logs/atlas.pid"/>
+	</condition>
+
+	<condition property="running-build-process">
+		<equals arg1="${atlas-unpack-dir}" arg2="/tmp"/>
+	</condition>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="download-atlas" if="atlas-zip-not-found">
+		<echo message="Downloading Apache Atlas 0.7-incubating-release. Depending on your network this can last up to 20 (yes, twenty) minutes." />
+		<!-- Make sure to update text message when moving to a new Atlas release / revision -->
+		<get verbose="true" src="https://ibm.box.com/shared/static/ftwi0wlpjtyv3nnvyh354epayqfwynsn.zip" dest="${atlas-archive}" />
+		<echo message="Atlas downloaded" />
+	</target>
+
+	<target name="unzip-atlas" unless="atlas-unpacked">
+		<antcall target="download-atlas"/>
+		<echo message="Installing Atlas test instance" />
+		<echo message="Deleting ${atlas-unpack-dir}/${atlas-dir}" />
+		<delete dir="${atlas-unpack-dir}/${atlas-dir}" failonerror="false" />
+		<echo message="deleted" />
+		<chmod file="${atlas-unpack-dir}/${atlas-archive}" perm="755" os="unix,mac"/>
+		<unzip src="${atlas-archive}" dest="${atlas-unpack-dir}" />
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="stop-atlas" if="atlas-unpacked">
+		<echo message="Stopping atlas server if it exists" />
+		<exec dir="${atlas-unpack-dir}/${atlas-dir}/bin" executable="python">
+			<env key="JAVA_HOME" value="${java.home}" />
+        		<arg value="atlas_stop.py" />
+    		</exec>
+		<sleep seconds="10" />
+	</target>
+
+	<target name="ensure-atlas-stopped" depends="print-info" unless="use.running.atlas">
+		<echo message="Ensure Atlas is stopped..."/>
+		<antcall target="stop-atlas"/>
+		<delete file="${atlas-unpack-dir}/${atlas-dir}/logs/atlas.pid"/>
+		<echo message="Atlas is stopped."/>
+	</target>
+
+	<target name="remove-atlas-dir" depends="ensure-atlas-stopped" if="running-build-process">
+    	<echo message="Resetting atlas data"/>
+    	<delete dir="/tmp/${atlas-dir}" />
+    	<echo message="Atlas directory deleted"/>
+	</target>
+
+	<target name="reset-derby-data">
+    	<echo message="Resetting derby DB"/>
+    	<delete dir="/tmp/odf-derby" />
+	</target>
+
+	<target name="restart-atlas-on-windows" if="is-windows">
+		<antcall target="start-atlas"/>
+		<antcall target="stop-atlas"/>
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="start-atlas">
+		<echo message="Starting atlas server" />
+		<exec dir="${atlas-unpack-dir}/${atlas-dir}/bin" executable="python">
+			<env key="JAVA_HOME" value="${java.home}/.." />
+			<arg value="atlas_start.py" />
+		</exec>
+		<echo message="Waiting for Atlas Server to start..." />
+		<waitfor maxwait="60" maxwaitunit="second">
+			<socket server="localhost" port="21443" />
+		</waitfor>
+	</target>
+
+	<target name="check-atlas-url">
+		<fail>
+			<condition>
+				<not>
+					<socket server="localhost" port="21443" />
+				</not>
+			</condition>
+		</fail>
+	</target>
+
+	<target name="prepare-atlas" unless="atlas-running">
+		<antcall target="unzip-atlas"/>
+		<antcall target="enable-atlas-ssl"/>
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="import-atlas-sampledata-win" if="is-windows">
+		<echo message="Importing sample data" />
+		<exec executable="cmd">
+			<env key="JAVA_HOME" value="${java.home}" />
+			<arg value="/c" />
+			<arg value="${atlas-unpack-dir}/${atlas-dir}/bin/quick_start.py" />
+		</exec>
+
+		<echo message="Atlas test instance brought up" />
+	</target>
+
+	<target name="import-atlas-sampledata-unix" if="is-unix">
+		<echo message="Importing sample data" />
+		<exec dir="${atlas-unpack-dir}/${atlas-dir}/bin" executable="python">
+			<env key="JAVA_HOME" value="${java.home}" />
+			<arg value="quick_start.py" />
+		</exec>
+
+		<echo message="Atlas test instance brought up" />
+	</target>
+
+	<target name="import-atlas-sampledata" depends="import-atlas-sampledata-win,import-atlas-sampledata-unix">
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="select-atlas-config-file-windows" if="is-windows">
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_windows" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" overwrite="true"/>
+		<echo message="Using atlas SSL configuration for Windows." />
+	</target>
+
+	<target name="select-atlas-config-file-mac" if="is-mac">
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_mac" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" overwrite="true"/>
+		<echo message="Using atlas SSL configuration for Mac OS." />
+	</target>
+
+	<target name="select-atlas-config-file-unix" if="is-unix">
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_linux" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" overwrite="true"/>
+		<echo message="Using atlas SSL configuration for Unix." />
+	</target>
+
+	<target name="select-atlas-config-file" depends="select-atlas-config-file-unix,select-atlas-config-file-windows,select-atlas-config-file-mac">
+	</target>
+
+	<target name="unquote-colons-in-atlas-config-file">
+		<!-- The following replacement is needed because the ant propertyfile task quotes colons and backslashes -->
+		<replace file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties">
+			<replacetoken>\:</replacetoken>
+			<replacevalue>:</replacevalue>
+		</replace>
+		<replace file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties">
+			<replacetoken>\\</replacetoken>
+			<replacevalue>\</replacevalue>
+		</replace>
+	</target>
+
+	<target name="enable-atlas-ssl">
+		<!-- For Atlas security features see: http://atlas.incubator.apache.org/Security.html -->
+		<echo message="Updating atlas-application.properties file..." />
+		<propertyfile file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties">
+			<entry  key="cert.stores.credential.provider.path" value="jceks://file/${sys:atlas.home}/conf/keystore_openjdk.jceks"/>
+			<entry  key="atlas.enableTLS" value="true"/>
+			<entry  key="truststore.file" value="${sys:atlas.home}/conf/keystore_openjdk.jks"/>
+			<entry  key="keystore.file" value="${sys:atlas.home}/conf/keystore_openjdk.jks"/>
+			<entry  key="atlas.server.https.port" value="21443"/>
+			<entry  key="atlas.DeleteHandler.impl" value="org.apache.atlas.repository.graph.HardDeleteHandler"/>
+			<entry  key="atlas.TypeCache.impl" value="org.apache.atlas.repository.typestore.StoreBackedTypeCache"/>
+		</propertyfile>
+		<antcall target="unquote-colons-in-atlas-config-file"/>
+		<!-- Keep this version of the config file for Mac (using oracle/open jdk) -->
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_mac" overwrite="true"/>
+
+		<!-- Create separate version of config file for Linux (using ibm jdk) -->
+		<propertyfile file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties">
+			<entry  key="cert.stores.credential.provider.path" value="jceks://file/${sys:atlas.home}/conf/keystore_ibmjdk.jceks"/>
+			<entry  key="truststore.file" value="${sys:atlas.home}/conf/keystore_ibmjdk.jks"/>
+			<entry  key="keystore.file" value="${sys:atlas.home}/conf/keystore_ibmjdk.jks"/>
+		</propertyfile>
+		<antcall target="unquote-colons-in-atlas-config-file"/>
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_linux" overwrite="true"/>
+
+		<!-- Create separate version of config file for Windows (using ibm jdk and hardcoded credential provider file (issue #94)) -->
+		<propertyfile file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties">
+			<entry  key="cert.stores.credential.provider.path" value="jceks://file/C\:/tmp/${atlas-dir}/conf/keystore_ibmjdk.jceks"/>
+		</propertyfile>
+		<antcall target="unquote-colons-in-atlas-config-file"/>
+		<copy file="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties" tofile="${atlas-unpack-dir}/${atlas-dir}/conf/atlas-application.properties_windows" overwrite="true"/>
+
+		<!-- keystore.jceks file is stored in Box@IBM - Re-generate the file using Atlas command bin/cputil.sh -->
+		<!-- Note that ibm jdk uses different format than oracle/open jdk, therefore a separate version has to be generated for each jdk -->
+		<get verbose="true" src="https://ibm.box.com/shared/static/uyzqeayk5ut5f5fqnlvm8nhn9ixb642d.jceks" dest="${atlas-unpack-dir}/${atlas-dir}/conf/keystore_openjdk.jceks" />
+		<get verbose="true" src="https://ibm.box.com/shared/static/ibopoyukw7uhbt83a1zu33nwvnamht3j.jceks" dest="${atlas-unpack-dir}/${atlas-dir}/conf/keystore_ibmjdk.jceks" />
+		<!-- keystore.jks file is stored in Box@IBM - Re-generate the file using the Java keytool -->
+		<!-- command: keytool -genkey -alias myatlas -keyalg RSA -keystore /tmp/atlas-security/keystore.jks -keysize 2048 -->
+		<!-- Note that ibm jdk uses different format than oracle/open jdk, therefore a separate version has to be generated for each jdk -->
+		<get verbose="true" src="https://ibm.box.com/shared/static/odnmhqua5sdue03z43vqsv0lp509ov70.jks" dest="${atlas-unpack-dir}/${atlas-dir}/conf/keystore_openjdk.jks" />
+		<get verbose="true" src="https://ibm.box.com/shared/static/k0qgh31ynbgnjsrbg5s97hsqbssh6pd4.jks" dest="${atlas-unpack-dir}/${atlas-dir}/conf/keystore_ibmjdk.jks" />
+
+		<antcall target="select-atlas-config-file"/>
+		<echo message="Atlas SSL has been enabled." />
+		<!-- On windows, Atlas needs to be re-started again in order for the kafka queues to come up properly -->
+		<antcall target="restart-atlas-on-windows" />
+	</target>
+
+	<!-- ****************************************************************************************** -->
+	<target name="print-info" if="use.running.atlas">
+		<echo message="Don't start/stop Atlas because use.running.atlas is set" />
+	</target>
+
+	<target name="clean-atlas" depends="print-info" unless="use.running.atlas">
+		<echo message="Cleaning Atlas" />
+		<antcall target="remove-atlas-dir"/>
+		<antcall target="reset-derby-data"/>
+	</target>
+
+	<target name="ensure-atlas-running" depends="print-info" unless="use.running.atlas">
+		<echo message="Ensure that Atlas is running" />
+		<antcall target="prepare-atlas" />
+		<antcall target="start-atlas"/>
+		<antcall target="check-atlas-url"/>
+		<echo message="Atlas is running" />
+	</target>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-atlas/pom.xml
----------------------------------------------------------------------
diff --git a/odf/odf-atlas/pom.xml b/odf/odf-atlas/pom.xml
new file mode 100755
index 0000000..cc714e6
--- /dev/null
+++ b/odf/odf-atlas/pom.xml
@@ -0,0 +1,216 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+	xmlns:if="ant:if">
+	<modelVersion>4.0.0</modelVersion>
+	<parent>
+		<groupId>org.apache.atlas.odf</groupId>
+		<artifactId>odf</artifactId>
+		<version>1.2.0-SNAPSHOT</version>
+	</parent>
+	<artifactId>odf-atlas</artifactId>
+
+	<dependencies>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-api</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-core</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-messaging</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<scope>runtime</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-messaging</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<type>test-jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-store</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<scope>runtime</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-spark</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<scope>runtime</scope>
+		</dependency>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.12</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-core</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<type>test-jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.atlas.odf</groupId>
+			<artifactId>odf-spark</artifactId>
+			<version>1.2.0-SNAPSHOT</version>
+			<type>test-jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.apache.derby</groupId>
+			<artifactId>derby</artifactId>
+			<version>10.12.1.1</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-failsafe-plugin</artifactId>
+				<version>2.19</version>
+				<configuration>
+					<systemPropertyVariables>
+						<odf.zookeeper.connect>${testZookeepeConnectionString}</odf.zookeeper.connect>
+						<odf.logspec>${odf.integrationtest.logspec}</odf.logspec>
+						<atlas.url>${atlas.url}</atlas.url>
+						<atlas.user>${atlas.user}</atlas.user>
+						<atlas.password>${atlas.password}</atlas.password>
+					</systemPropertyVariables>
+					<dependenciesToScan>
+						<dependency>org.apache.atlas.odf:odf-core</dependency>
+					</dependenciesToScan>
+					<includes>
+						<include>**/integrationtest/**/**.java</include>
+					</includes>
+				</configuration>
+				<executions>
+					<execution>
+						<id>integration-test</id>
+						<goals>
+							<goal>integration-test</goal>
+						</goals>
+					</execution>
+					<execution>
+						<id>verify</id>
+						<goals>
+							<goal>verify</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.19</version>
+				<configuration>
+					<systemPropertyVariables>
+						<odf.zookeeper.connect>${testZookeepeConnectionString}</odf.zookeeper.connect>
+						<odf.logspec>${odf.unittest.logspec}</odf.logspec>
+						<odf.build.project.name>${project.name}</odf.build.project.name>
+						<atlas.url>${atlas.url}</atlas.url>
+						<atlas.user>${atlas.user}</atlas.user>
+						<atlas.password>${atlas.password}</atlas.password>
+					</systemPropertyVariables>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-antrun-plugin</artifactId>
+				<version>1.8</version>
+				<executions>
+					<execution>
+						<inherited>false</inherited>
+						<id>clean-atlas</id>
+						<phase>clean</phase>
+						<goals>
+							<goal>run</goal>
+						</goals>
+						<configuration>
+							<target>
+								<property name="atlas-unpack-dir" value="/tmp"/>
+								<property name="atlas.version" value="${atlas.version}"/>
+								<ant antfile="build_atlas.xml" target="clean-atlas"/>
+							</target>
+						</configuration>
+					</execution>
+					<execution>
+						<id>ensure-atlas-running</id>
+						<phase>process-test-classes</phase>
+						<!-- <phase>pre-integration-test</phase> -->
+						<goals>
+							<goal>run</goal>
+						</goals>
+						<configuration>
+							<target unless="skipTests">
+								<property name="atlas-unpack-dir" value="/tmp" />
+								<property name="atlas.version" value="${atlas.version}" />
+								<ant antfile="build_atlas.xml" target="ensure-atlas-running"></ant>
+							</target>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+
+	<profiles>
+		<profile>
+			<id>atlas</id>
+			<build>
+				<plugins>
+					<plugin>
+						<groupId>org.apache.maven.plugins</groupId>
+						<artifactId>maven-antrun-plugin</artifactId>
+						<version>1.8</version>
+						<executions>
+							<!-- Start Atlas even when skipping the tests, in order to have it available for the test environment -->
+							<execution>
+								<id>ensure-atlas-running</id>
+								<phase>process-test-classes</phase>
+								<!-- <phase>pre-integration-test</phase> -->
+								<goals>
+									<goal>run</goal>
+								</goals>
+								<configuration>
+									<target unless="skipTests">
+										<property name="atlas-unpack-dir" value="/tmp" />
+										<property name="atlas.version" value="${atlas.version}" />
+										<ant antfile="build_atlas.xml" target="ensure-atlas-running"></ant>
+									</target>
+								</configuration>
+							</execution>
+						</executions>
+					</plugin>
+				</plugins>
+			</build>
+		</profile>
+	</profiles>
+
+</project>


Mime
View raw message