atlas-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From mad...@apache.org
Subject [01/25] incubator-atlas git commit: ATLAS-1898: initial commit of ODF
Date Wed, 28 Jun 2017 05:57:14 GMT
Repository: incubator-atlas
Updated Branches:
  refs/heads/feature-odf 8101883cc -> 6d19e1295


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/main/webapp/scripts/odf-utils.js
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/main/webapp/scripts/odf-utils.js b/odf/odf-web/src/main/webapp/scripts/odf-utils.js
new file mode 100755
index 0000000..5684556
--- /dev/null
+++ b/odf/odf-web/src/main/webapp/scripts/odf-utils.js
@@ -0,0 +1,338 @@
+/**
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var $ = require("jquery");
+var React = require("react");
+var ODFGlobals = require("./odf-globals.js");
+
+var Utils= {
+
+	// Deep equality check for two arrays: compares length, then each element,
+	// recursing into elements that are objects/arrays. null handling: both
+	// null -> true, exactly one null -> false.
+	// NOTE(review): two latent issues — (1) a nested recursive comparison
+	// assigns its result to `equal`, so a later matching element can overwrite
+	// an earlier `false`; (2) the `a[key] && !b[key]` guard treats valid falsy
+	// members (0, "", false) in b as missing. Confirm callers only compare
+	// arrays of truthy/object elements before relying on this.
+	arraysEqual : function(arr1, arr2){
+		var a = arr1;
+		var b = arr2;
+		if(arr1 == null){
+			if(arr2 == null){
+				return true;
+			}
+			return false;
+		}else{
+			if(arr2 == null){
+				return false;
+			}
+		}
+
+		if(a.length != b.length){
+			return false;
+		}
+
+		var equal = true;
+		$.each(a, function(key, val){
+			if(a[key] && !b[key]){
+				equal = false;
+				return;
+			}
+			if(val && typeof val == "object"){
+				equal = this.arraysEqual(val, b[key]);
+				return;
+			}else{
+				if(val != b[key]){
+					equal = false;
+					return;
+				}
+			}
+		}.bind(this));
+		return equal;
+	},
+
+	// Client for the ODF annotation store REST endpoint.
+	AnnotationStoreHelper : {
+		// GET all annotations produced by the given analysis request.
+		// Both callbacks are optional; returns the jqXHR promise so callers
+		// can also chain on it directly.
+		loadAnnotationsForRequest : function(analysisRequestId, successCallback, errorCallback) {
+		    var url = ODFGlobals.annotationsUrl + "?analysisRequestId=" + analysisRequestId;
+            return $.ajax({
+               url: url,
+               type: 'GET',
+               success: function(data) {
+                   if(successCallback){
+                       successCallback(data);
+                   }
+               },
+               error: function(xhr, status, err) {
+                   if(errorCallback){
+                       errorCallback(err);
+                   }
+               }
+            });
+		}
+	},
+
+	// Client for Atlas-backed metadata objects and their annotations.
+	AtlasHelper : {
+
+		// Starts one loadAtlasAsset request per entry in `assets` and returns
+		// the array of jqXHR promises; the callbacks fire once per asset.
+		loadAtlasAssets : function(assets, successCallback, errorCallback){
+			var reqs = [];
+			$.each(assets, function(key, val){
+				reqs.push(this.loadAtlasAsset(val, successCallback, errorCallback));
+			}.bind(this));
+			return reqs;
+		},
+
+		// GET the newest annotations for a single asset, addressed by its
+		// JSON-encoded {repositoryId, id} reference.
+		loadMostRecentAnnotations : function(asset, successCallback, errorCallback) {
+		    var url = ODFGlobals.annotationsUrl + "/newestAnnotations/" + encodeURIComponent(JSON.stringify({repositoryId: asset.repositoryId, id: asset.id}));
+            return $.ajax({
+               url: url,
+               type: 'GET',
+               success: function(data) {
+                   if(successCallback){
+                       successCallback(data);
+                   }
+               },
+               error: function(xhr, status, err) {
+                   if(errorCallback){
+                       errorCallback(err);
+                   }
+               }
+            });
+		},
+
+		// Loads the columns of a relational data set, stores their references
+		// on dataSet.columns (NB: mutates the argument), then fetches the
+		// annotations of every column and passes the collected column assets
+		// to successCallback once all requests have completed.
+		loadRelationalDataSet: function(dataSet, successCallback, errorCallback) {
+			var url = ODFGlobals.metadataUrl + "/asset/" + encodeURIComponent(JSON.stringify({repositoryId: dataSet.reference.repositoryId, id: dataSet.reference.id})) + "/columns";
+			return $.ajax({
+				url: url,
+				type: 'GET',
+				error: function(xhr, status, err) {
+					if(errorCallback){
+						errorCallback(err);
+					}
+				}
+			}).then( function(cols){
+				if(!cols){
+					successCallback([]);
+					return [];
+				}
+				var requests = [];
+				var colRefs = [];
+				$.each(cols, function(key, val){
+					var req = Utils.AtlasHelper.getColAnnotations(val);
+					requests.push(req);
+					colRefs.push(val.reference);
+				}.bind(this));
+				dataSet.columns = colRefs;
+				// $.when resolves with one argument per request when there are
+				// several, but with the bare result when there is exactly one —
+				// hence the length check below.
+				$.when.apply(undefined, requests).done(function(){
+					var data = [];
+					if(requests.length > 1){
+						$.each(arguments, function(key, val){
+							data.push(val);
+						});
+					}else if(arguments[0]){
+						data.push(arguments[0]);
+					}
+					successCallback(data);
+				});
+				return requests;
+			})
+		},
+
+		// GET the annotations for one asset and attach them as
+		// asset.annotations. The assignment happens both in the success
+		// handler and in the .then — redundant but harmless; the resolved
+		// value is the asset itself.
+		getColAnnotations: function(asset, successCallback, errorCallback) {
+			var refid = asset.reference.id;
+		   var annotationsUrl = ODFGlobals.annotationsUrl + "?assetReference=" + encodeURIComponent(refid);
+		   return $.ajax({
+			   url: annotationsUrl,
+			   type: 'GET',
+			   success: function(annotationData) {
+				   asset.annotations = annotationData.annotations;
+				   if (successCallback) {
+					   successCallback(asset);
+				   }
+			   },
+			   error: function(xhr, status, err) {
+				   if(errorCallback){
+					   errorCallback(err);
+				   }
+			   }
+		   }).then(function(annotationData) {
+			   asset.annotations = annotationData.annotations;
+			   return asset;
+		   });
+		},
+
+		// Loads a single asset by reference, then its annotations; resolves
+		// with the asset object carrying an .annotations property.
+		loadAtlasAsset : function(asset, successCallback, errorCallback){
+			var url = ODFGlobals.metadataUrl + "/asset/" + encodeURIComponent(JSON.stringify({repositoryId: asset.repositoryId, id: asset.id}));
+			return $.ajax({
+		       url: url,
+		       type: 'GET',
+		       error: function(xhr, status, err) {
+		    	   if(errorCallback){
+		    		   errorCallback(err);
+		    	   }
+		       }
+			}).then( function(data) {
+	    		   var refid = data.reference.id;
+	    		   var annotationsUrl = ODFGlobals.annotationsUrl + "?assetReference=" + encodeURIComponent(refid);
+	    		   return $.ajax({
+	    			  url: annotationsUrl,
+	    			  type: 'GET',
+	    			  success: function(annotationData) {
+	    				  data.annotations = annotationData.annotations;
+	    				  if (successCallback) {
+	    					  successCallback(data);
+	    				  }
+	    			  },
+	    			  error: function(xhr, status, err) {
+	    				  if(errorCallback){
+	    					  errorCallback(err);
+	    				  }
+	    			  }
+	    		   }).then(function(annotationData) {
+	     			   data.annotations = annotationData.annotations;
+	    			   return data;
+	    		   });
+			});
+		},
+
+		// Free-text search against the metadata endpoint.
+		// NOTE(review): the error message mentions "recent analysis requests"
+		// although this is a search — looks copy-pasted from another handler;
+		// confirm the intended wording.
+		searchAtlasMetadata : function(query, successCallback, errorCallback) {
+			var url = ODFGlobals.metadataUrl + "/search?" + $.param({query: query});
+			var req = $.ajax({
+				url: url,
+				dataType: 'json',
+				type: 'GET',
+				success: function(data) {
+					successCallback(data);
+				},
+				error: function(xhr, status, err) {
+					console.error(url, status, err.toString());
+					var msg = "Error while loading recent analysis requests: " + err.toString();
+					errorCallback(msg);
+				}
+			});
+			return req;
+		}
+	},
+
+	// Read access to the metadata store properties endpoint.
+	MetadataStore : {
+
+		// GET the metadata store properties. alertCallback({type: ""}) is
+		// invoked up front to clear any previously shown alert; on failure
+		// a "danger" alert with the error text is raised instead.
+		getProperties(successCallback, alertCallback) {
+			if (alertCallback) {
+				alertCallback({type: ""});
+			}
+			return $.ajax({
+				url: ODFGlobals.metadataUrl,
+				dataType: 'json',
+				type: 'GET',
+				success: successCallback,
+				error: function(xhr, status, err) {
+					if (alertCallback) {
+						var msg = "Error while reading metadata store properties: " + err.toString();
+						alertCallback({type: "danger", message: msg});
+					}
+				}
+			});
+		}
+	},
+
+	// Read/write access to the ODF settings endpoint.
+	ConfigurationStore : {
+
+	  // readUserDefinedProperties(successCallback, alertCallback) {
+	   // GET the current ODF settings as JSON. The initial
+	   // alertCallback({type: ""}) clears any previous alert.
+	   readConfig(successCallback, alertCallback) {
+		   if (alertCallback) {
+		     alertCallback({type: ""});
+		   }
+	     // clear alert
+
+	     return $.ajax({
+	       url: ODFGlobals.apiPrefix + "settings",
+	       dataType: 'json',
+	       type: 'GET',
+	       success: successCallback,
+	       error: function(xhr, status, err) {
+	         if (alertCallback) {
+	            var msg = "Error while reading user defined properties: " + err.toString();
+	            alertCallback({type: "danger", message: msg});
+	         }
+	       }
+	      });
+	   },
+
+	   // PUT the given settings object as JSON.
+	   // NOTE(review): the error message says "reading user defined
+	   // properties" although this is a write — likely copy-paste from
+	   // readConfig; confirm intended wording.
+	   updateConfig(config, successCallback, alertCallback) {
+			if (alertCallback) {
+				 alertCallback({type: ""});
+			}
+
+		    return $.ajax({
+			       url: ODFGlobals.apiPrefix + "settings",
+			       contentType: "application/json",
+			       dataType: 'json',
+			       type: 'PUT',
+			       data: JSON.stringify(config),
+			       success: successCallback,
+			       error: function(xhr, status, err) {
+			         if (alertCallback) {
+			            var msg = "Error while reading user defined properties: " + err.toString();
+			            alertCallback({type: "danger", message: msg});
+			         }
+			       }
+		     });
+	   }
+	},
+
+	// Read access to the list of registered ODF discovery services.
+	ServicesStore : {
+
+	  // readUserDefinedProperties(successCallback, alertCallback) {
+	   // GET the list of ODF services. The initial alertCallback({type: ""})
+	   // clears any previous alert; failures raise a "danger" alert.
+	   getServices(successCallback, alertCallback) {
+		   if (alertCallback) {
+		     alertCallback({type: ""});
+		   }
+	     // clear alert
+
+	     return $.ajax({
+	       url: ODFGlobals.apiPrefix + "services",
+	       dataType: 'json',
+	       type: 'GET',
+	       success: successCallback,
+	       error: function(xhr, status, err) {
+	         if (alertCallback) {
+	            var msg = "Error while getting list of ODF services: " + err.toString();
+	            alertCallback({type: "danger", message: msg});
+	         }
+	       }
+	      });
+	   }
+	},
+
+	// Helpers around document.location.hash based navigation state.
+	URLHelper : {
+
+		// Returns the first path segment of the current hash including the
+		// leading "#" (e.g. "#view" for "#view/123"), or "" if there is none.
+		// NOTE(review): with an empty location hash, split("#")[1] is
+		// undefined and this yields the string "#undefined"; also
+		// split.length > 0 is always true — confirm intended behavior.
+		getBaseHash : function(){
+			var baseHash = "#" + document.location.hash.split("#")[1];
+			var split = baseHash.split("/");
+			if(split.length>0){
+				return split[0];
+			}
+			return "";
+		},
+
+		// Replaces everything after the base hash with the URI-encoded
+		// newAddition; objects are JSON.stringified first. Does nothing if
+		// the current hash no longer starts with the base hash (e.g. the
+		// user navigated away). Relies on String.prototype.startsWith —
+		// presumably an ES6 runtime or polyfill is available; verify target
+		// browsers.
+		setUrlHash : function(newAddition){
+			if(!newAddition){
+				newAddition = "";
+			}
+			if(newAddition != "" && typeof newAddition === "object"){
+				newAddition = JSON.stringify(newAddition);
+			}
+			var hash = document.location.hash;
+			var baseHash = this.getBaseHash();
+			if(!hash.startsWith(baseHash)){
+				return;
+			}
+			document.location.hash = baseHash + "/" + encodeURIComponent(newAddition);
+		}
+	}
+};
+
+module.exports = Utils;

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/main/webapp/swagger/index.html
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/main/webapp/swagger/index.html b/odf/odf-web/src/main/webapp/swagger/index.html
new file mode 100755
index 0000000..4eb6ff1
--- /dev/null
+++ b/odf/odf-web/src/main/webapp/swagger/index.html
@@ -0,0 +1,142 @@
+
+<!DOCTYPE html>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~     http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<html>
+<head>
+  <meta charset="UTF-8">
+  <title>Swagger UI</title>
+  <link rel="icon" type="image/png" href="images/favicon-32x32.png" sizes="32x32" />
+  <link rel="icon" type="image/png" href="images/favicon-16x16.png" sizes="16x16" />
+  <link href='css/typography.css' media='screen' rel='stylesheet' type='text/css'/>
+  <link href='css/reset.css' media='screen' rel='stylesheet' type='text/css'/>
+  <link href='css/screen.css' media='screen' rel='stylesheet' type='text/css'/>
+  <link href='css/reset.css' media='print' rel='stylesheet' type='text/css'/>
+  <link href='css/print.css' media='print' rel='stylesheet' type='text/css'/>
+  <script src='lib/jquery-1.8.0.min.js' type='text/javascript'></script>
+  <script src='lib/jquery.slideto.min.js' type='text/javascript'></script>
+  <script src='lib/jquery.wiggle.min.js' type='text/javascript'></script>
+  <script src='lib/jquery.ba-bbq.min.js' type='text/javascript'></script>
+  <script src='lib/handlebars-2.0.0.js' type='text/javascript'></script>
+  <script src='lib/underscore-min.js' type='text/javascript'></script>
+  <script src='lib/backbone-min.js' type='text/javascript'></script>
+  <script src='swagger-ui.js' type='text/javascript'></script>
+  <script src='lib/highlight.7.3.pack.js' type='text/javascript'></script>
+  <script src='lib/jsoneditor.min.js' type='text/javascript'></script>
+  <script src='lib/marked.js' type='text/javascript'></script>
+  <script src='lib/swagger-oauth.js' type='text/javascript'></script>
+
+  <!-- Some basic translations -->
+  <!-- <script src='lang/translator.js' type='text/javascript'></script> -->
+  <!-- <script src='lang/ru.js' type='text/javascript'></script> -->
+  <!-- <script src='lang/en.js' type='text/javascript'></script> -->
+
+  <script type="text/javascript">
+    // Initializes Swagger UI on DOM ready: resolves the spec URL from the
+    // ?url= query parameter (falling back to swagger.json), configures the
+    // SwaggerUi instance, wires optional OAuth and api_key authorization,
+    // and renders into #swagger-ui-container.
+    $(function () {
+      var url = window.location.search.match(/url=([^&]+)/);
+      if (url && url.length > 1) {
+        url = decodeURIComponent(url[1]);
+      } else {
+        url = "swagger.json";
+      }
+
+      // Pre load translate...
+      if(window.SwaggerTranslator) {
+        window.SwaggerTranslator.translate();
+      }
+      window.swaggerUi = new SwaggerUi({
+        url: url,
+        validatorUrl: null,
+        dom_id: "swagger-ui-container",
+        supportedSubmitMethods: ['get', 'post', 'put', 'delete', 'patch'],
+        onComplete: function(swaggerApi, swaggerUi){
+          if(typeof initOAuth == "function") {
+            initOAuth({
+              clientId: "your-client-id",
+              clientSecret: "your-client-secret-if-required",
+              realm: "your-realms",
+              appName: "your-app-name",
+              scopeSeparator: ",",
+              additionalQueryStringParams: {}
+            });
+          }
+
+          if(window.SwaggerTranslator) {
+            window.SwaggerTranslator.translate();
+          }
+
+          // Syntax-highlight rendered response/request bodies.
+          $('pre code').each(function(i, e) {
+            hljs.highlightBlock(e)
+          });
+
+          addApiKeyAuthorization();
+        },
+        onFailure: function(data) {
+          log("Unable to Load SwaggerUI");
+        },
+        docExpansion: "none",
+        jsonEditor: false,
+        apisSorter: "alpha",
+        defaultModelRendering: 'schema',
+        showRequestHeaders: false
+      });
+
+      // Registers the value of #input_apiKey as an "api_key" query-parameter
+      // authorization on the swagger client (skipped when the field is blank).
+      function addApiKeyAuthorization(){
+        var key = encodeURIComponent($('#input_apiKey')[0].value);
+        if(key && key.trim() != "") {
+            var apiKeyAuth = new SwaggerClient.ApiKeyAuthorization("api_key", key, "query");
+            window.swaggerUi.api.clientAuthorizations.add("api_key", apiKeyAuth);
+            log("added key " + key);
+        }
+      }
+
+      $('#input_apiKey').change(addApiKeyAuthorization);
+
+      // if you have an apiKey you would like to pre-populate on the page for demonstration purposes...
+      /*
+        var apiKey = "myApiKeyXXXX123456789";
+        $('#input_apiKey').val(apiKey);
+      */
+
+      window.swaggerUi.load();
+
+      // console.log wrapper that is safe in browsers without a console.
+      function log() {
+        if ('console' in window) {
+          console.log.apply(console, arguments);
+        }
+      }
+  });
+  </script>
+</head>
+
+<body class="swagger-section">
+<div id='header'>
+  <div class="swagger-ui-wrap">
+    <a id="logo" href="http://swagger.io">swagger</a>
+    <form id='api_selector'>
+      <div class='input'><input placeholder="http://example.com/api" id="input_baseUrl" name="baseUrl" type="text"/></div>
+      <div class='input'><input placeholder="api_key" id="input_apiKey" name="apiKey" type="text"/></div>
+      <div class='input'><a id="explore" href="#" data-sw-translate>Explore</a></div>
+    </form>
+  </div>
+</div>
+
+<div id="message-bar" class="swagger-ui-wrap" data-sw-translate>&nbsp;</div>
+<div id="swagger-ui-container" class="swagger-ui-wrap"></div>
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java
new file mode 100755
index 0000000..6f23c0d
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java
@@ -0,0 +1,79 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.admin;
+
+import java.io.InputStream;
+import java.util.Collection;
+
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.engine.ServiceRuntimeInfo;
+import org.apache.atlas.odf.api.engine.ServiceRuntimesInfo;
+import org.apache.atlas.odf.api.engine.SystemHealth;
+import org.apache.atlas.odf.core.Utils;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class EngineResourceTest extends RestTestBase {
+
+	/**
+	 * Verifies that GET /engine/health returns HTTP 200 and a body that
+	 * deserializes into a non-null {@link SystemHealth} object.
+	 */
+	@Test
+	public void testHealth() throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/health");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Health check request returned: " + s);
+		checkResult(httpResp, HttpStatus.SC_OK);
+		SystemHealth health = JSONUtils.fromJSON(s, SystemHealth.class);
+		Assert.assertNotNull(health);
+	}
+	
+	/** Returns true if any runtime in the collection has the given name. */
+	boolean containsRuntimeWithName(Collection<ServiceRuntimeInfo> runtimes, String name) {
+		for (ServiceRuntimeInfo sri : runtimes) {
+			if (name.equals(sri.getName())) {
+				return true;
+			}
+		}
+		return false;
+	}
+	
+	/**
+	 * Verifies that GET /engine/runtimes returns HTTP 200 and reports more
+	 * than two runtimes, including "Java", "Spark" and "HealthCheck".
+	 */
+	@Test
+	public void testRuntimesInfo() throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/runtimes");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Runtime Info returned: " + s);
+		checkResult(httpResp, HttpStatus.SC_OK);
+		ServiceRuntimesInfo sri = JSONUtils.fromJSON(s, ServiceRuntimesInfo.class);
+		Assert.assertNotNull(sri);
+		Assert.assertTrue(sri.getRuntimes().size() > 2);
+		Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), "Java"));
+		Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), "Spark"));
+		Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), "HealthCheck"));
+
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java
new file mode 100755
index 0000000..d093a73
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.admin;
+
+import org.apache.atlas.odf.api.settings.MessagingConfiguration;
+import org.apache.atlas.odf.api.settings.ODFSettings;
+import org.apache.atlas.odf.core.Encryption;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpStatus;
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.settings.KafkaMessagingConfiguration;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class SettingsResourceTest extends RestTestBase {
+
+	/**
+	 * Reads the settings and checks that a Kafka messaging configuration with
+	 * a consumer config (incl. zookeeper connection timeout) and the
+	 * user-defined properties section are present.
+	 */
+	@Test
+	public void testSettingsRead() throws Exception {
+		ODFSettings settings = settingsRead();
+		Assert.assertNotNull(settings);
+		MessagingConfiguration msgConfig = settings.getMessagingConfiguration();
+		Assert.assertNotNull(msgConfig);
+		Assert.assertTrue(msgConfig instanceof KafkaMessagingConfiguration);
+		KafkaMessagingConfiguration kafkaMsgConfig = (KafkaMessagingConfiguration) msgConfig;
+		Assert.assertNotNull(kafkaMsgConfig.getKafkaConsumerConfig());
+		Assert.assertNotNull(kafkaMsgConfig.getKafkaConsumerConfig().getZookeeperConnectionTimeoutMs());
+
+		Assert.assertNotNull(settings.getUserDefined());
+	}
+
+	/**
+	 * Writes a plain-text password and verifies the REST API returns it
+	 * hidden ("***hidden***"); then restores the original password from the
+	 * odf.test.password system property (stored encrypted).
+	 * NOTE(review): the assertEquals at the hidden-password check passes
+	 * actual before expected — JUnit convention is (expected, actual).
+	 */
+	@Test
+	public void testPasswordEncryption() throws Exception {
+		ODFSettings settings = settingsRead();
+		settings.setOdfPassword("newOdfPassword");
+		ODFSettings configWithPlainPasswords = settings;
+		settingsWrite(JSONUtils.toJSON(configWithPlainPasswords), HttpStatus.SC_OK);
+		logger.info("Settings with plain password: " + JSONUtils.toJSON(configWithPlainPasswords));
+
+		// REST API must return hidden password
+		ODFSettings configWithHiddenPasswords = settingsRead();
+		String hiddenPasswordIdentifyier = "***hidden***";
+		Assert.assertEquals(configWithHiddenPasswords.getOdfPassword(), hiddenPasswordIdentifyier);
+
+		// Reset passwords
+		Assert.assertNotNull(System.getProperty("odf.test.password"));
+		settings = settingsRead();
+		settings.setOdfPassword(Encryption.decryptText(System.getProperty("odf.test.password")));
+		settingsWrite(JSONUtils.toJSON(settings), HttpStatus.SC_OK);
+	}
+
+	/** A valid settings fragment must be accepted with HTTP 200. */
+	@Test
+	public void testSettingsWriteSuccess() throws Exception {
+		String configSnippet = "{ \"runAnalysisOnImport\": false }";
+		logger.info("Testing write settings success with JSON: " + configSnippet);
+		settingsWrite(configSnippet, HttpStatus.SC_OK);
+	}
+	
+	/** A type-invalid settings fragment must be rejected with HTTP 500. */
+	@Test
+	public void testSettingsWriteFailure() throws Exception {
+		String configSnippet = "{ \"runAnalysisOnImport\": \"someInvalidValue\" }";
+		logger.info("Testing write settings failure with JSON: " + configSnippet);
+		settingsWrite(configSnippet, HttpStatus.SC_INTERNAL_SERVER_ERROR);
+	}
+
+	/**
+	 * Changes the instance id, verifies the change took effect, then calls
+	 * the reset operation and verifies the original instance id is restored.
+	 */
+	@Test
+	public void testSettingsReset() throws Exception {
+		logger.info("Testing reset settings operation.");
+		String updatedId = "updatedInstanceId";
+		ODFSettings originalConfig = settingsRead();
+		String originalInstanceId = originalConfig.getInstanceId();
+		originalConfig.setInstanceId(updatedId);
+
+		settingsWrite(JSONUtils.toJSON(originalConfig), HttpStatus.SC_OK);
+		
+		ODFSettings newConfig = settingsRead();
+		Assert.assertEquals(updatedId, newConfig.getInstanceId());
+
+		settingsReset();
+
+		ODFSettings resetConfig = settingsRead();
+		String resetInstanceId = resetConfig.getInstanceId();
+
+		Assert.assertEquals(originalInstanceId, resetInstanceId);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
new file mode 100755
index 0000000..21b7887
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.analysis.test;
+
+import java.io.InputStream;
+
+import org.apache.atlas.odf.core.Utils;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.wink.json4j.JSONObject;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class ODFVersionTest extends RestTestBase {
+
+	/**
+	 * Verifies that GET /engine/version returns a JSON body with a "version"
+	 * field whose value starts with "1.2.0-".
+	 */
+	@Test
+	public void testVersion() throws Exception {
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/version");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Version request returned: " + s);
+
+		JSONObject jo = new JSONObject(s);
+		String version = jo.getString("version");
+		Assert.assertNotNull(version);
+		Assert.assertTrue(version.startsWith("1.2.0-"));
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
new file mode 100755
index 0000000..4900d63
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.annotations;
+
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+import java.util.logging.Logger;
+
+import org.apache.atlas.odf.api.annotation.Annotations;
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.atlas.odf.api.metadata.models.DataFile;
+import org.apache.atlas.odf.integrationtest.metadata.MetadataResourceTest;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.http.entity.ContentType;
+import org.apache.wink.json4j.JSON;
+import org.apache.wink.json4j.JSONObject;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.metadata.MetaDataObjectReference;
+import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class AnnotationsResourceTest extends RestTestBase {
+	Logger logger = Logger.getLogger(AnnotationsResourceTest.class.getName());
+
+	/** Populates the metadata store with sample data before each test and asserts HTTP 200. */
+	@Before
+	public void createSampleData() throws Exception {
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(getBaseURI() + "/metadata/sampledata");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		checkResult(httpResp, HttpStatus.SC_OK);
+	}
+
+	/**
+	 * Profiling-annotation subtype with one extra bean property, used to
+	 * verify that typed annotations round-trip through the REST API.
+	 */
+	public static class AnnotationsResourceTestProfilingAnnotation extends ProfilingAnnotation {
+		// Extra property whose value is checked after retrieval.
+		private String newAnnotProp;
+
+		public String getNewAnnotProp() {
+			return newAnnotProp;
+		}
+
+		public void setNewAnnotProp(String newAnnotProp) {
+			this.newAnnotProp = newAnnotProp;
+		}
+
+	}
+
+	static String newAnnotPropValue = "newAnnotPropValue" + UUID.randomUUID().toString();
+	static String newAnnotPropKey = "newAnnotProp";
+
+	static String unknownAnnotType = "UnknownAnnotType" + UUID.randomUUID().toString();
+
+	/**
+	 * Builds two test annotations for the given object reference and analysis
+	 * request id: one typed (AnnotationsResourceTestProfilingAnnotation) and
+	 * one generic ProfilingAnnotation carrying the same key/value via its
+	 * jsonProperties string.
+	 */
+	List<Annotation> createTestAnnotations(MetaDataObjectReference ref, String reqId) {
+		List<Annotation> result = new ArrayList<>();
+		AnnotationsResourceTestProfilingAnnotation annot = new AnnotationsResourceTestProfilingAnnotation();
+		annot.setProfiledObject(ref);
+		annot.setNewAnnotProp(newAnnotPropValue);
+		annot.setAnalysisRun(reqId);
+		result.add(annot);
+
+		ProfilingAnnotation genericAnnot = new ProfilingAnnotation();
+		genericAnnot.setProfiledObject(ref);
+		genericAnnot.setAnalysisRun(reqId);
+		genericAnnot.setJsonProperties("{\"" + newAnnotPropKey + "\": \"" + newAnnotPropValue + "\"}");
+		result.add(genericAnnot);
+
+		return result;
+	}
+
+	/** Returns the reference of the first DataFile found in the metadata store. */
+	MetaDataObjectReference getTestDataSetRef() throws Exception {
+		String s = MetadataResourceTest.getAllMetadataObjectsOfType("DataFile");
+		logger.info("Retrieved test data set refs: " + s);
+		List<DataFile> dfRefs = JSONUtils.fromJSONList(s, DataFile.class);
+		return dfRefs.get(0).getReference();
+	}
+
+	@Test
+	public void testAnnotationStore() throws Exception {
+		MetaDataObjectReference dfRef = getTestDataSetRef();
+		String reqId = "TestRequestId" + UUID.randomUUID().toString();
+		logger.info("Test Annotatoin store with request ID: " + reqId);
+		List<Annotation> newAnnots = createTestAnnotations(dfRef, reqId);
+
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		List<String> createdAnnotIds = new ArrayList<>();
+		// create annotations
+		for (Annotation annot : newAnnots) {
+			String restRequestBody = JSONUtils.toJSON(annot);
+			logger.info("Creating annotation via request " + restRequestBody);
+			Request req = Request.Post(getBaseURI() + "/annotations").bodyString(restRequestBody, ContentType.APPLICATION_JSON);
+			Response resp = exec.execute(req);
+			HttpResponse httpResp = resp.returnResponse();
+			checkResult(httpResp, HttpStatus.SC_CREATED);
+			InputStream is = httpResp.getEntity().getContent();
+			MetaDataObjectReference createdAnnot = JSONUtils.fromJSON(is, MetaDataObjectReference.class);
+			Assert.assertNotNull(createdAnnot);
+			Assert.assertNotNull(createdAnnot.getId());
+			createdAnnotIds.add(createdAnnot.getId());
+		}
+		logger.info("Annotations created, now retrieving them again: " + createdAnnotIds);
+
+		// check retrieval
+		Request req = Request.Get(getBaseURI() + "/annotations?assetReference=" + dfRef.getId());
+		Response resp = exec.execute(req);
+
+		HttpResponse httpResp = resp.returnResponse();
+		checkResult(httpResp, HttpStatus.SC_OK);
+		Annotations retrieveResult = JSONUtils.fromJSON(httpResp.getEntity().getContent(), Annotations.class);
+		List<Annotation> retrievedAnnots = retrieveResult.getAnnotations();
+		logger.info("Retrieved annotations: " + retrievedAnnots);
+		int foundAnnots = 0;
+		for (Annotation retrievedAnnot : retrievedAnnots) {
+			logger.info("Checking annotation: " + retrievedAnnot.getReference());
+			logger.info("Annotation " + retrievedAnnot.getReference().getId() + " has request ID: " + retrievedAnnot.getAnalysisRun());
+			if (reqId.equals(retrievedAnnot.getAnalysisRun())) {
+				logger.info("Checking annotation " + retrievedAnnot + " of class " + retrievedAnnot.getClass());
+				Assert.assertTrue(retrievedAnnot instanceof ProfilingAnnotation);
+
+				if (retrievedAnnot instanceof AnnotationsResourceTestProfilingAnnotation) {
+					AnnotationsResourceTestProfilingAnnotation tpa = (AnnotationsResourceTestProfilingAnnotation) retrievedAnnot;
+					Assert.assertEquals(dfRef, tpa.getProfiledObject());
+					Assert.assertEquals(newAnnotPropValue, tpa.getNewAnnotProp());
+				} else {
+					// other annotations are "unknown", thus no subclass of ProfilingAnnotation
+					Assert.assertTrue(retrievedAnnot.getClass().equals(ProfilingAnnotation.class));
+					
+					String jsonProps = retrievedAnnot.getJsonProperties();
+					Assert.assertNotNull(jsonProps);
+					JSONObject jo = (JSONObject) JSON.parse(jsonProps);
+					Assert.assertTrue(jo.containsKey(newAnnotPropKey));
+					Assert.assertEquals(newAnnotPropValue, jo.getString(newAnnotPropKey));
+				}
+				Assert.assertTrue(createdAnnotIds.contains(retrievedAnnot.getReference().getId()));
+				foundAnnots++;
+				
+				// check that retrieval by Id works
+				logger.info("Retrieving annotation " + retrievedAnnot.getReference().getId() + " again");
+				String url = getBaseURI() + "/annotations/objects/" + retrievedAnnot.getReference().getId();
+				logger.info("Retriveing annotation with URL: " + url);
+				Request req1 = Request.Get(url);
+				Response resp1 = exec.execute(req1);
+
+				HttpResponse httpResp1 = resp1.returnResponse();
+				checkResult(httpResp1, HttpStatus.SC_OK);
+				Annotation newRetrievedAnnot = JSONUtils.fromJSON(httpResp1.getEntity().getContent(), Annotation.class);
+				Assert.assertEquals(retrievedAnnot.getReference(), newRetrievedAnnot.getReference());
+				Assert.assertEquals(retrievedAnnot.getClass(), newRetrievedAnnot.getClass());
+				Assert.assertEquals(retrievedAnnot.getJsonProperties(), newRetrievedAnnot.getJsonProperties());
+			}
+		}
+		Assert.assertEquals(createdAnnotIds.size(), foundAnnots);
+
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java
new file mode 100755
index 0000000..d76a272
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.metadata;
+
+import java.io.InputStream;
+import java.util.logging.Logger;
+
+import org.apache.atlas.odf.api.metadata.MetadataStore;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.http.client.utils.URIBuilder;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.ODFFactory;
+import org.apache.atlas.odf.api.metadata.models.BusinessTerm;
+import org.apache.atlas.odf.api.metadata.models.DataFile;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class MetadataResourceTest extends RestTestBase {
+
+	static Logger logger = Logger.getLogger(MetadataResourceTest.class.getName());
+
+	@Before
+	public void createSampleData() throws Exception {
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(getBaseURI() + "/metadata/sampledata");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		checkResult(httpResp, HttpStatus.SC_OK);
+	}
+
+	public static String getAllMetadataObjectsOfType(String dataType) throws Exception {
+		MetadataStore mdsForQueryGeneration = new ODFFactory().create().getMetadataStore();
+		String query = mdsForQueryGeneration.newQueryBuilder().objectType(dataType).build();
+		logger.info("Metadata search query metadata " + query);
+
+		URIBuilder builder = new URIBuilder(getBaseURI() + "/metadata/search").addParameter("query", query);
+		String uri = builder.build().toString();
+		logger.info("Searching against URL: " + uri);
+		Request req = Request.Get(uri);
+		Response response = getRestClientManager().getAuthenticatedExecutor().execute(req);
+		HttpResponse httpResp = response.returnResponse();
+		Assert.assertEquals(HttpStatus.SC_OK, httpResp.getStatusLine().getStatusCode());
+		InputStream is = httpResp.getEntity().getContent();
+		String s = JSONUtils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Response: " + s);
+		return s;
+	}
+
+	@Test
+	public void testMetadataResourceSearchOMDataFile() throws Exception {
+		String s = getAllMetadataObjectsOfType("DataFile");
+		Assert.assertTrue(s.contains("DataFile")); // minimal checking that JSON contains something useful and specific to this type
+		JSONUtils.fromJSONList(s, DataFile.class);
+	}
+
+	@Test
+	public void testMetadataResourceSearchOMBusinessTerm() throws Exception {
+		String s = getAllMetadataObjectsOfType("BusinessTerm");
+		Assert.assertTrue(s.contains("BusinessTerm")); // minimal checking that JSON contains something useful and specific to this type
+		JSONUtils.fromJSONList(s, BusinessTerm.class);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java
new file mode 100755
index 0000000..c70c500
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.metadata;
+
+import java.net.URISyntaxException;
+import java.util.Properties;
+
+import org.apache.atlas.odf.core.Encryption;
+import org.apache.atlas.odf.core.integrationtest.metadata.MetadataStoreTestBase;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.metadata.MetaDataObjectReference;
+import org.apache.atlas.odf.api.metadata.MetadataQueryBuilder;
+import org.apache.atlas.odf.api.metadata.MetadataStore;
+import org.apache.atlas.odf.api.metadata.MetadataStoreException;
+import org.apache.atlas.odf.api.metadata.RemoteMetadataStore;
+
/**
 * Runs the shared MetadataStoreTestBase test cases against a RemoteMetadataStore
 * that talks to the ODF REST API configured via the odf.test.* system properties.
 * Several base-class tests are overridden below until issue #122 is resolved.
 */
public class RemoteMetadataStoreTest extends MetadataStoreTestBase {

	// Factory hook for the base class: connect to the remote store using the test credentials.
	protected MetadataStore getMetadataStore() {
		RemoteMetadataStore rms = null;
		try {
			rms = new RemoteMetadataStore(RestTestBase.getOdfBaseUrl(), RestTestBase.getOdfUser(), Encryption.decryptText(RestTestBase.getOdfPassword()), true);
		} catch (MetadataStoreException | URISyntaxException e) {
			throw new RuntimeException("Error connecting to remote metadata store,", e);
		}
		return rms;
	}

	//TODO: Remove all methods below this comment once the DefaultMetadataStore is queue-based (issue #122)
	// RemoteMetadataStore will then use the exact same test cases as the other (writable) metadata stores 

	// Reset the store and recreate the sample data so each test starts from a known state.
	@Before
	public void createSampleData() {
		//TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122)
		MetadataStore mds = getMetadataStore();
		mds.resetAllData();
		mds.createSampleData();
	}

	// The remote store must expose a non-empty set of configuration properties.
	@Test
	public void testProperties() throws Exception {
		//TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122)
		RemoteMetadataStore rms = new RemoteMetadataStore(RestTestBase.getOdfBaseUrl(), RestTestBase.getOdfUser(), Encryption.decryptText(RestTestBase.getOdfPassword()), true);
		Properties props = rms.getProperties();
		Assert.assertNotNull(props);
		Assert.assertTrue(!props.isEmpty());
	}

	// Reference navigation on a sample DataFile, delegated to the base-class checker.
	@Test
	public void testReferences() throws Exception {
		//TODO: Do not overwrite original method once DefaultMetadataStore is queue-based
		MetadataStore mds = getMetadataStore();
		MetadataStoreTestBase.checkReferences(mds, MetadataStoreTestBase.getDataFileTestObject(mds));
	}

	// Exercises retrieve-by-reference, simple query conditions, and type-hierarchy queries
	// against the sample data created in createSampleData().
	@Test
	public void testSearchAndRetrieve() {
		//TODO: Do not overwrite original method once DefaultMetadataStore is queue-based

		// Test retrieve
		MetadataStore mds = getMetadataStore();
		MetaDataObjectReference bankClientsShortRef = mds.search(mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build()).get(0);
		Assert.assertEquals("The metadata store did not retrieve the object with the expected name.", "BankClientsShort", mds.retrieve(bankClientsShortRef).getName());

		// Test queries with conditions
		checkQueryResults(mds, new String[] { "BankClientsShort" }, mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build(), false);
		checkQueryResults(mds, new String[] { "SimpleExampleTable" }, mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.NOT_EQUALS, "BankClientsShort").build(), false);
		checkQueryResults(mds, new String[] { "NAME" },
				mds.newQueryBuilder().objectType("Column").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "NAME").simpleCondition("dataType", MetadataQueryBuilder.COMPARATOR.EQUALS, "string").build(), false);

		// Test type hierarchy
		checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable" }, mds.newQueryBuilder().objectType("DataFile").build(), true);
		checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable" }, mds.newQueryBuilder().objectType("RelationalDataSet").build(), true);
		checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable", "Simple URL example document", "Simple local example document" }, mds.newQueryBuilder().objectType("DataSet").build(), false);
		checkQueryResults(mds, new String[] { "BankClientsShort" }, mds.newQueryBuilder().objectType("MetaDataObject").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build(), false);
	}

	// Intentionally empty: disables the base-class annotation test for the remote store.
	@Test
	public void testAnnotations() {
		//TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122)
	}
}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java
new file mode 100755
index 0000000..d7bbc0f
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.spark;
+
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.atlas.odf.api.analysis.AnalysisRequest;
+import org.apache.atlas.odf.api.metadata.MetadataStore;
+import org.apache.atlas.odf.api.metadata.models.Annotation;
+import org.apache.atlas.odf.api.metadata.models.RelationalDataSet;
+import org.apache.atlas.odf.api.settings.ODFSettings;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.wink.json4j.JSONException;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import org.apache.atlas.odf.api.metadata.RemoteMetadataStore;
+import org.apache.atlas.odf.core.Encryption;
+import org.apache.atlas.odf.api.ODFFactory;
+import org.apache.atlas.odf.core.Utils;
+import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus.State;
+import org.apache.atlas.odf.api.annotation.AnnotationStore;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceEndpoint;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceProperties;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceSparkEndpoint;
+import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceSparkEndpoint.SERVICE_INTERFACE_TYPE;
+import org.apache.atlas.odf.core.integrationtest.metadata.internal.spark.SparkDiscoveryServiceLocalTest;
+import org.apache.atlas.odf.core.integrationtest.metadata.internal.spark.SparkDiscoveryServiceLocalTest.DATASET_TYPE;
+import org.apache.atlas.odf.api.settings.SparkConfig;
+import org.apache.atlas.odf.json.JSONUtils;
+
/**
 * Integration test that registers a Spark-based discovery service with the ODF
 * web application, runs an analysis against a sample data set, and verifies that
 * annotations were produced.
 */
public class SparkDiscoveryServiceWebTest extends RestTestBase {
	protected static Logger logger = Logger.getLogger(SparkDiscoveryServiceWebTest.class.getName());

	// Populate the metadata store with sample data before each test.
	@Before
	public void createSampleData() throws Exception {
		Executor exec = getRestClientManager().getAuthenticatedExecutor();
		Request req = Request.Get(getBaseURI() + "/metadata/sampledata");
		Response resp = exec.execute(req);
		HttpResponse httpResp = resp.returnResponse();
		checkResult(httpResp, HttpStatus.SC_OK);
	}

	/**
	 * Builds the registration properties for the example Spark summary-statistics
	 * discovery service, pointing at the packaged example application jar.
	 */
	public static DiscoveryServiceProperties getSparkSummaryStatisticsService() throws JSONException {
		DiscoveryServiceProperties dsProperties = new DiscoveryServiceProperties();
		dsProperties.setId(SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID);
		dsProperties.setName("Spark summary statistics service");
		dsProperties.setDescription("Example discovery service calling summary statistics Spark application");
		dsProperties.setIconUrl("spark.png");
		dsProperties.setLink("http://www.spark.apache.org");
		// No type restrictions or prerequisite annotations for this example service.
		dsProperties.setPrerequisiteAnnotationTypes(null);
		dsProperties.setResultingAnnotationTypes(null);
		dsProperties.setSupportedObjectTypes(null);
		dsProperties.setAssignedObjectTypes(null);
		dsProperties.setAssignedObjectCandidates(null);
		dsProperties.setParallelismCount(2);
		DiscoveryServiceSparkEndpoint endpoint = new DiscoveryServiceSparkEndpoint();
		endpoint.setJar("file:///tmp/odf-spark/odf-spark-example-application-1.2.0-SNAPSHOT.jar");
		endpoint.setInputMethod(SERVICE_INTERFACE_TYPE.DataFrame);
		endpoint.setClassName("org.apache.atlas.odf.core.spark.SummaryStatistics");
		dsProperties.setEndpoint(JSONUtils.convert(endpoint, DiscoveryServiceEndpoint.class));
		return dsProperties;
	}

	/**
	 * Registers the given discovery service, runs an analysis on a sample data set of the
	 * given kind, and asserts that at least one annotation was created.
	 *
	 * @param sparkConfig     Spark cluster configuration to write into the ODF settings
	 * @param dataSetType     whether to analyze a sample file or a sample table
	 * @param regInfo         registration properties of the discovery service under test
	 * @param annotationNames NOTE(review): currently unused in this method — confirm whether
	 *                        per-annotation-type checks were intended here.
	 */
	public void runSparkServiceTest(SparkConfig sparkConfig, DATASET_TYPE dataSetType, DiscoveryServiceProperties regInfo, String[] annotationNames) throws Exception{
		logger.log(Level.INFO, "Testing spark application on ODF webapp url {0}.", getOdfBaseUrl());

		logger.info("Using Spark configuration: " + JSONUtils.toJSON(sparkConfig));
		ODFSettings settings = settingsRead();
		settings.setSparkConfig(sparkConfig);
		settings.setOdfUrl(Utils.getSystemPropertyExceptionIfMissing("odf.test.webapp.url"));
		settingsWrite(JSONUtils.toJSON(settings), HttpStatus.SC_OK);

		// Re-register the service from scratch so stale registrations cannot interfere.
		logger.log(Level.INFO, "Trying to delete existing discovery service: " + SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID);
		deleteService(SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID);

		logger.info("Using discovery service: " + JSONUtils.toJSON(regInfo));
		createService(JSONUtils.toJSON(regInfo), HttpStatus.SC_OK);

		checkServiceExists(regInfo.getId());

		MetadataStore mds = new RemoteMetadataStore(getOdfBaseUrl(), getOdfUser(), Encryption.decryptText(getOdfPassword()), true);
		Assert.assertNotNull(mds);


		// Pick the sample data set matching the requested kind.
		RelationalDataSet dataSet = null;
		if (dataSetType == DATASET_TYPE.FILE) {
			dataSet = SparkDiscoveryServiceLocalTest.getTestDataFile(mds);
		} else if (dataSetType == DATASET_TYPE.TABLE) {
			dataSet = SparkDiscoveryServiceLocalTest.getTestTable(mds);
		} else {
			Assert.assertTrue(false);
		}
		logger.info("Using dataset: " + JSONUtils.toJSON(dataSet));

		AnnotationStore as = new ODFFactory().create().getAnnotationStore();

		AnalysisRequest request = SparkDiscoveryServiceLocalTest.getSparkAnalysisRequest(dataSet);
		logger.info("Using analysis request: " + JSONUtils.toJSON(request));

		logger.info("Starting analysis...");
		String requestId = runAnalysis(request, State.FINISHED);

		// The service must have produced at least one annotation for this request.
		List<Annotation> annots = as.getAnnotations(null, requestId);
		logger.info("Number of annotations created: " + annots.size());
		Assert.assertTrue("No annotations have been created.", annots.size() > 0);
	}

	@Test
	public void testSparkServiceRESTAPI() throws Exception{
		runSparkServiceTest(SparkDiscoveryServiceLocalTest.getLocalSparkConfig(), DATASET_TYPE.FILE, getSparkSummaryStatisticsService(), new String[] { "SparkSummaryStatisticsAnnotation", "SparkTableAnnotation" });
	}

}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java
new file mode 100755
index 0000000..e23dd4e
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java
@@ -0,0 +1,289 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.rest.test;
+
+import java.io.InputStream;
+import java.net.URI;
+import java.text.MessageFormat;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.http.Header;
+import org.apache.http.HttpResponse;
+import org.apache.http.HttpStatus;
+import org.apache.http.StatusLine;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.http.client.utils.URIBuilder;
+import org.apache.http.entity.ContentType;
+import org.apache.http.message.BasicHeader;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+
+import org.apache.atlas.odf.core.Encryption;
+import org.apache.atlas.odf.core.Utils;
+import org.apache.atlas.odf.api.analysis.AnalysisRequestTrackers;
+import org.apache.atlas.odf.api.analysis.AnalysisRequest;
+import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus;
+import org.apache.atlas.odf.api.analysis.AnalysisRequestSummary;
+import org.apache.atlas.odf.api.analysis.AnalysisResponse;
+import org.apache.atlas.odf.api.annotation.Annotations;
+import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus.State;
+import org.apache.atlas.odf.api.connectivity.RESTClientManager;
+import org.apache.atlas.odf.api.settings.ODFSettings;
+import org.apache.atlas.odf.api.utils.ODFLogConfig;
+import org.apache.atlas.odf.core.test.TestEnvironment;
+import org.apache.atlas.odf.json.JSONUtils;
+
+public class RestTestBase {
+
+	protected static Logger logger = Logger.getLogger(RestTestBase.class.getName());
+
+	@BeforeClass
+	public static void setup() throws Exception {
+		ODFLogConfig.run();
+		TestEnvironment.startMessaging();
+	}
+	
+	protected static void checkResult(HttpResponse httpResponse, int expectedCode) {
+		StatusLine sl = httpResponse.getStatusLine();
+		int code = sl.getStatusCode();
+		logger.info("Http request returned: " + code + ", message: " + sl.getReasonPhrase());
+		Assert.assertEquals(expectedCode, code);
+	}
+
+	public static RESTClientManager getRestClientManager() {
+		return new RESTClientManager(URI.create(getOdfUrl()), getOdfUser(), Encryption.decryptText(getOdfPassword()));
+	}
+
+	public static String getOdfBaseUrl() {
+		String odfBaseURL = System.getProperty("odf.test.base.url");
+		return odfBaseURL;
+	}
+
+	public static String getOdfUrl() {
+		String odfURL = System.getProperty("odf.test.webapp.url");
+		return odfURL;
+	}
+
+	public static String getOdfUser() {
+		String odfUser = System.getProperty("odf.test.user");
+		return odfUser;
+	}
+
+	public static String getOdfPassword() {
+		String odfPassword = System.getProperty("odf.test.password");
+		return odfPassword;
+	}
+
+	public static String getBaseURI() {
+		return getOdfBaseUrl() + "/odf/api/v1";
+	}
+
+	public String runAnalysis(AnalysisRequest request, State expectedState) throws Exception {
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		String json = JSONUtils.toJSON(request);
+		logger.info("Starting analysis via POST request: " + json);
+
+		Header header = new BasicHeader("Content-Type", "application/json");
+		Request req = Request.Post(getBaseURI() + "/analyses").bodyString(json, ContentType.APPLICATION_JSON).addHeader(header);
+
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		checkResult(httpResp, HttpStatus.SC_OK);
+
+		InputStream is = httpResp.getEntity().getContent();
+		String jsonResponse = JSONUtils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Analysis response: " + jsonResponse);
+		AnalysisResponse analysisResponse = JSONUtils.fromJSON(jsonResponse, AnalysisResponse.class);
+		Assert.assertNotNull(analysisResponse);
+		String requestId = analysisResponse.getId();
+		Assert.assertNotNull(requestId);
+		logger.info("Request Id: " + requestId);
+
+		Assert.assertTrue(! analysisResponse.isInvalidRequest());
+		
+		AnalysisRequestStatus status = null;
+		int maxPolls = 400;
+		do {
+			Request statusRequest = Request.Get(getBaseURI() + "/analyses/" + requestId);
+			logger.info("Getting analysis status");
+			resp = exec.execute(statusRequest);
+			httpResp = resp.returnResponse();
+			checkResult(httpResp, HttpStatus.SC_OK);
+
+			String statusResponse = JSONUtils.getInputStreamAsString(httpResp.getEntity().getContent(), "UTF-8");
+			logger.info("Analysis status: " + statusResponse);
+			status = JSONUtils.fromJSON(statusResponse, AnalysisRequestStatus.class);
+
+			logger.log(Level.INFO, "Poll request for request ID ''{0}'' (expected state: ''{1}'', details: ''{2}''", new Object[] { requestId, status.getState(), status.getDetails(), State.FINISHED });
+			maxPolls--;
+			Thread.sleep(1000);
+		} while (maxPolls > 0 && (status.getState() == State.ACTIVE || status.getState() == State.QUEUED));
+		Assert.assertEquals(State.FINISHED, status.getState());
+		return requestId;
+	}
+
+	public void createService(String serviceJSON, int expectedCode) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+
+		Request req = Request.Post(RestTestBase.getBaseURI() + "/services")//
+				.bodyString(serviceJSON, ContentType.APPLICATION_JSON) //
+		.addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Create service request return code: " + httpResp.getStatusLine().getStatusCode() + ", content: " + s);
+		checkResult(httpResp, expectedCode);
+	}
+	
+	public void checkServiceExists(String serviceId) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/services/" + serviceId).addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Get service request return code: " + httpResp.getStatusLine().getStatusCode() + ", content: " + s);
+		checkResult(httpResp, 200);
+		
+	}
+
+	public void deleteService(String serviceId, int expectedCode) throws Exception {
+		checkResult(this.deleteService(serviceId), expectedCode);
+	}
+
+	public HttpResponse deleteService(String serviceId) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+		URIBuilder uri = new URIBuilder(RestTestBase.getBaseURI() + "/services/" + serviceId + "/cancel");
+		Request req = Request.Post(uri.build())//
+				.addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Delete service request returned: " + s);
+		return httpResp;
+	}
+
+	public ODFSettings settingsRead() throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/settings");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Settings read request returned: " + s);
+		is.close();
+		checkResult(httpResp, HttpStatus.SC_OK);
+		return JSONUtils.fromJSON(s, ODFSettings.class);
+	}
+
+	public void settingsWrite(String configSnippet, int expectedCode) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+
+		Request req = Request.Put(RestTestBase.getBaseURI() + "/settings")//
+				.bodyString(configSnippet, ContentType.APPLICATION_JSON) //
+		.addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Settings write request returned: " + s);
+		checkResult(httpResp, expectedCode);
+	}
+
+	public void settingsReset() throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+		Request req = Request.Post(RestTestBase.getBaseURI() + "/settings/reset")//
+		.addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Config reset request returned: " + s);
+		checkResult(httpResp, HttpStatus.SC_OK);
+	}
+
+	public void cancelAnalysisRequest(String requestId, int expectedCode) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Header header = new BasicHeader("Content-Type", "application/json");
+
+		Request req = Request.Post(RestTestBase.getBaseURI() + "/analyses/" + requestId + "/cancel").addHeader(header);
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		is.close();
+		logger.info("Cancel analyses request returned: " + s);
+		checkResult(httpResp, expectedCode);
+	}
+
+	public AnalysisRequestTrackers getAnalysesRequests(int offset, int limit) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(MessageFormat.format("{0}/analyses?offset={1}&limit={2}", RestTestBase.getBaseURI(), offset, limit));
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Analyses read request returned: " + s);
+		is.close();
+		checkResult(httpResp, HttpStatus.SC_OK);
+		return JSONUtils.fromJSON(s, AnalysisRequestTrackers.class);
+	}
+
+	public AnalysisRequestSummary getAnalysesStats() throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/analyses/stats");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Analyses statistics request returned: " + s);
+		is.close();
+		checkResult(httpResp, HttpStatus.SC_OK);
+		return JSONUtils.fromJSON(s, AnalysisRequestSummary.class);
+	}
+
+	public Annotations getAnnotations(String analysisRequestId) throws Exception {
+		Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor();
+		URIBuilder uri = new URIBuilder(RestTestBase.getBaseURI() + "/annotations").addParameter("analysisRequestId", analysisRequestId);
+		Request req = Request.Get(uri.build());
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Settings read request returned: " + s);
+		is.close();
+		checkResult(httpResp, HttpStatus.SC_OK);
+		return JSONUtils.fromJSON(s, Annotations.class);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/webpack.config.js
----------------------------------------------------------------------
diff --git a/odf/odf-web/webpack.config.js b/odf/odf-web/webpack.config.js
new file mode 100755
index 0000000..380f705
--- /dev/null
+++ b/odf/odf-web/webpack.config.js
@@ -0,0 +1,65 @@
+/**
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+var path = require('path');
+
+const APP_ROOT="./src/main/webapp";
+const MAIN_FILE= path.resolve(APP_ROOT + "/scripts/odf-console.js");
+const CLIENT_FILE= path.resolve(APP_ROOT + "/scripts/odf-client.js");
+
+module.exports = {
+	entry: {
+		"odf-web": MAIN_FILE,
+		"odf-client": CLIENT_FILE
+	},
+
+    output: {
+        filename: "/[name].js",
+        path: path.resolve(APP_ROOT)
+    },
+
+    module: {
+	    loaders: [
+	      {
+	        test: /\.jsx?$/,
+	        loader: 'babel',
+	        query: {
+	            presets: ['react', 'es2015']
+	        },
+	        	include: /(webapp)/,
+	        	exlude: /(odf-web.js)/
+	      },
+	      {
+	    	  test: /\.(jsx|js)$/,
+	    	  loader: 'imports?jQuery=jquery,$=jquery,this=>window'
+	      },
+	      {
+	          test: /\.css$/,
+	          loader: 'style!css'
+	      },
+	      {
+	          test: /\.(png|jpg)$/,
+	          loader: 'url?limit=25000&name=resources/img/[hash].[ext]'
+	      },
+	      {
+	    	  test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/,
+        	  loader: 'url-loader?limit=25000&&minetype=application/font-woff&name=resources/fonts/[hash].[ext]'
+          },
+          {
+        	  test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
+	          loader: 'url?limit=25000&name=resources/fonts/[hash].[ext]'
+          }
+	    ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/pom.xml
----------------------------------------------------------------------
diff --git a/odf/pom.xml b/odf/pom.xml
new file mode 100755
index 0000000..2e1f263
--- /dev/null
+++ b/odf/pom.xml
@@ -0,0 +1,133 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<artifactId>odf</artifactId>
+	<name>odf</name>
+	<groupId>org.apache.atlas.odf</groupId>
+	<version>1.2.0-SNAPSHOT</version>
+	<packaging>pom</packaging>
+
+
+	<!-- Core ODF modules that are built in every configuration. -->
+	<modules>
+		<module>odf-api</module>
+		<module>odf-core</module>
+		<module>odf-store</module>
+		<module>odf-messaging</module>
+	</modules>
+
+	<profiles>
+		<!-- Opt-in profile adding the Atlas metadata-store integration module. -->
+		<profile>
+			<id>atlas</id>
+			<modules>
+				<module>odf-atlas</module>
+			</modules>
+		</profile>
+		<!-- Adds the Spark, documentation, web-app and archetype modules; active
+		     unless the build is started with -Dreduced-build=true. -->
+		<profile>
+			<id>complete-build</id>
+			<activation>
+				<property>
+					<name>reduced-build</name>
+					<value>!true</value>
+				</property>
+			</activation>
+			<modules>
+				<module>odf-spark-example-application</module>
+				<module>odf-spark</module>
+				<module>odf-doc</module>
+				<module>odf-web</module>
+				<module>odf-archetype-discoveryservice</module>
+			</modules>
+		</profile>
+		<!-- Opt-in profile building the self-contained ODF test environment. -->
+		<profile>
+			<id>test-env</id>
+			<modules>
+				<module>odf-test-env</module>
+			</modules>
+		</profile>
+	</profiles>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+		<!-- NOTE(review): property name is missing the "r" of "Zookeeper"; child
+		     modules presumably reference it under this exact name, so renaming
+		     must be coordinated across the whole build - do not fix in isolation. -->
+		<testZookeepeConnectionString>localhost:2181</testZookeepeConnectionString>
+		<odf.test.logdir>/tmp</odf.test.logdir>
+		<odf.unittest.logspec>ALL,${odf.test.logdir}/${project.name}-unit-trace.log</odf.unittest.logspec>
+		<odf.integrationtest.logspec>ALL,${odf.test.logdir}/${project.name}-integration-trace.log</odf.integrationtest.logspec>
+		<jackson.version>2.6.5</jackson.version>
+		<jetty.maven.plugin.port>58080</jetty.maven.plugin.port>
+		<odf.test.base.url>https://localhost:${jetty.maven.plugin.port}</odf.test.base.url>
+		<odf.test.webapp.url>https://localhost:${jetty.maven.plugin.port}/odf-web-1.2.0-SNAPSHOT</odf.test.webapp.url>
+		<!-- NOTE(review): credentials are committed in the POM. Presumably these
+		     are throwaway test-only values; confirm they are never reused for a
+		     real deployment. -->
+		<odf.test.user>sdp</odf.test.user>
+		<odf.test.password>ZzTeX3hKtVORgks+2TaLPWxerucPBoxK</odf.test.password>
+		<atlas.version>0.7-incubating-release</atlas.version>
+		<atlas.url>https://localhost:21443</atlas.url>
+		<atlas.user>admin</atlas.user>
+		<atlas.password>UR0+HOiApXG9B8SNpKN5ww==</atlas.password>
+	</properties>
+
+	<build>
+		<plugins>
+			<!-- make sure we are compiling for Java 1.7 -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.3.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+			<!-- Delete the test execution log during the test phase; inherited=false
+			     means this runs only for this parent POM, not per child module. -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-antrun-plugin</artifactId>
+				<version>1.8</version>
+				<executions>
+					<execution>
+						<inherited>false</inherited>
+						<phase>test</phase>
+						<goals>
+							<goal>run</goal>
+						</goals>
+						<configuration>
+							<tasks>
+								<delete>
+									<fileset dir="/tmp/" includes="odf-test-execution-log.csv"/>
+								</delete>
+							</tasks>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+			<!-- Print the dependency tree early (validate phase) for diagnostics. -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-dependency-plugin</artifactId>
+				<version>2.10</version>
+				<executions>
+					<execution>
+						<id>list-dependencies</id>
+						<phase>validate</phase>
+						<goals>
+							<goal>tree</goal>
+						</goals>
+						<configuration>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/prepare_embedded_jetty.xml
----------------------------------------------------------------------
diff --git a/odf/prepare_embedded_jetty.xml b/odf/prepare_embedded_jetty.xml
new file mode 100755
index 0000000..c9aa044
--- /dev/null
+++ b/odf/prepare_embedded_jetty.xml
@@ -0,0 +1,90 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+~
+~ Licensed under the Apache License, Version 2.0 (the "License");
+~ you may not use this file except in compliance with the License.
+~ You may obtain a copy of the License at
+~
+~   http://www.apache.org/licenses/LICENSE-2.0
+~
+~ Unless required by applicable law or agreed to in writing, software
+~ distributed under the License is distributed on an "AS IS" BASIS,
+~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+~ See the License for the specific language governing permissions and
+~ limitations under the License.
+-->
+
+<project name="prepare_embedded_jetty">
+
+	<!-- Directory layout: static jetty config files from jettyconfig/ plus a
+	     downloaded keystore end up merged into target/jettyconfig. -->
+	<dirname property="script.basedir" file="${ant.file.prepare_embedded_jetty}" />
+	<property name="source-dir" value="${script.basedir}/jettyconfig" />
+	<property name="download-dir" value="${script.basedir}/target/downloads/jettyconfig" />
+	<property name="target-dir" value="${script.basedir}/target/jettyconfig" />
+
+	<!-- OS detection flags used to pick the matching keystore download below.
+	     NOTE(review): on macOS, Ant reports both the "unix" and "mac" families,
+	     so is-unix and is-mac can both be set and both downloads may run (the
+	     mac keystore overwriting the unix one) - confirm this is intended. -->
+	<condition property="is-windows">
+		<os family="windows">
+		</os>
+	</condition>
+
+	<condition property="is-unix">
+		<os family="unix">
+		</os>
+	</condition>
+
+	<condition property="is-mac">
+		<os family="mac">
+		</os>
+	</condition>
+
+	<!-- Set once target/jettyconfig exists so the whole preparation is skipped
+	     on repeated builds. -->
+	<condition property="config-available">
+	   <available file="${target-dir}"/>
+    </condition>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="create-directories">
+		<mkdir dir="${download-dir}"/>
+		<mkdir dir="${target-dir}"/>
+	</target>
+
+	<!-- Merge static config files and the downloaded keystore into target-dir. -->
+	<target name="copy-config-files">
+		<copy todir="${target-dir}">
+			<fileset dir="${source-dir}" />
+			<fileset dir="${download-dir}" />
+		</copy>
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<target name="download-keystore-file-windows" if="is-windows">
+		<get verbose="true" src="https://ibm.box.com/shared/static/k0qgh31ynbgnjsrbg5s97hsqbssh6pd4.jks" dest="${download-dir}/keystore.jks" />
+		<echo message="Downloaded IBM JDK keystore because we are on Windows." />
+	</target>
+
+	<target name="download-keystore-file-mac" if="is-mac">
+		<get verbose="true" src="https://ibm.box.com/shared/static/odnmhqua5sdue03z43vqsv0lp509ov70.jks" dest="${download-dir}/keystore.jks" />
+		<echo message="Downloaded OpenJDK keystore because we are on Mac." />
+	</target>
+
+	<target name="download-keystore-file-unix" if="is-unix">
+		<get verbose="true" src="https://ibm.box.com/shared/static/k0qgh31ynbgnjsrbg5s97hsqbssh6pd4.jks" dest="${download-dir}/keystore.jks" />
+		<echo message="Downloaded IBM JDK keystore because we are on UNIX (Other than Mac)." />
+	</target>
+
+	<!-- Aggregator: each OS-specific target guards itself via its "if" flag. -->
+	<target name="download-keystore-file" depends="download-keystore-file-unix,download-keystore-file-windows,download-keystore-file-mac">
+		<!-- keystore.jks file is stored in Box@IBM - Re-generate the file using the Java keytool -->
+		<!-- command: keytool -genkey -alias myatlas -keyalg RSA -keystore /tmp/atlas-security/keystore.jks -keysize 2048 -->
+		<!-- Note that ibm jdk uses different format than oracle/open jdk, therefore a separate version has to be generated for each jdk -->
+	</target>
+
+	<!-- ****************************************************************************************** -->
+
+	<!-- Entry point: no-op when config-available is set (see condition above). -->
+	<target name="prepare-jetty-config" unless="config-available">
+		<echo message="Preparing jetty configuration..." />
+		<antcall target="create-directories" />
+		<antcall target="download-keystore-file"/>
+		<antcall target="copy-config-files"/>
+		<echo message="Jetty configuration completed." />
+	</target>
+
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ebc07e2..23042e3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2009,6 +2009,11 @@
                         <exclude>**/policy-store.txt</exclude>
                         <exclude>**/*rebel*.xml</exclude>
                         <exclude>**/*rebel*.xml.bak</exclude>
+
+                        <!-- exclude the following files in the ODF directory -->
+                        <exclude>**/.gitignore</exclude>
+                        <exclude>odf/**/*.csv</exclude>
+                        <exclude>odf/**/*.txt</exclude>
                     </excludes>
                 </configuration>
                 <executions>


Mime
View raw message