From: madhan@apache.org
To: commits@atlas.incubator.apache.org
Date: Wed, 28 Jun 2017 05:57:14 -0000
Subject: [01/25] incubator-atlas git commit: ATLAS-1898: initial commit of ODF

Repository: incubator-atlas
Updated Branches:
  refs/heads/feature-odf 8101883cc -> 6d19e1295


http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/main/webapp/scripts/odf-utils.js
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/main/webapp/scripts/odf-utils.js b/odf/odf-web/src/main/webapp/scripts/odf-utils.js
new file mode 100755
index 0000000..5684556
--- /dev/null
+++ b/odf/odf-web/src/main/webapp/scripts/odf-utils.js
@@ -0,0 +1,338 @@
+/**
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var $ = require("jquery"); +var React = require("react"); +var ODFGlobals = require("./odf-globals.js"); + +var Utils= { + + arraysEqual : function(arr1, arr2){ + var a = arr1; + var b = arr2; + if(arr1 == null){ + if(arr2 == null){ + return true; + } + return false; + }else{ + if(arr2 == null){ + return false; + } + } + + if(a.length != b.length){ + return false; + } + + var equal = true; + $.each(a, function(key, val){ + if(a[key] && !b[key]){ + equal = false; + return; + } + if(val && typeof val == "object"){ + equal = this.arraysEqual(val, b[key]); + return; + }else{ + if(val != b[key]){ + equal = false; + return; + } + } + }.bind(this)); + return equal; + }, + + AnnotationStoreHelper : { + loadAnnotationsForRequest : function(analysisRequestId, successCallback, errorCallback) { + var url = ODFGlobals.annotationsUrl + "?analysisRequestId=" + analysisRequestId; + return $.ajax({ + url: url, + type: 'GET', + success: function(data) { + if(successCallback){ + successCallback(data); + } + }, + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }); + } + }, + + AtlasHelper : { + + loadAtlasAssets : function(assets, successCallback, errorCallback){ + var reqs = []; + $.each(assets, function(key, val){ + reqs.push(this.loadAtlasAsset(val, successCallback, errorCallback)); + }.bind(this)); + return reqs; + }, + + loadMostRecentAnnotations : function(asset, successCallback, errorCallback) { + var url = ODFGlobals.annotationsUrl + "/newestAnnotations/" + encodeURIComponent(JSON.stringify({repositoryId: asset.repositoryId, id: asset.id})); + return $.ajax({ + url: url, + type: 'GET', + success: function(data) { + if(successCallback){ + successCallback(data); + } + }, + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }); + }, + + loadRelationalDataSet: function(dataSet, successCallback, errorCallback) { + var url = ODFGlobals.metadataUrl + "/asset/" + encodeURIComponent(JSON.stringify({repositoryId: dataSet.reference.repositoryId, id: dataSet.reference.id})) + "/columns"; + return $.ajax({ + url: url, + type: 'GET', + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }).then( function(cols){ + if(!cols){ + successCallback([]); + return []; + } + var requests = []; + var colRefs = []; + $.each(cols, function(key, val){ + var req = Utils.AtlasHelper.getColAnnotations(val); + requests.push(req); + colRefs.push(val.reference); + }.bind(this)); + dataSet.columns = colRefs; + $.when.apply(undefined, requests).done(function(){ + var data = []; + if(requests.length > 1){ + $.each(arguments, function(key, val){ + data.push(val); + }); + }else if(arguments[0]){ + data.push(arguments[0]); + } + successCallback(data); + }); + return requests; + }) + }, + + getColAnnotations: function(asset, successCallback, errorCallback) { + var refid = asset.reference.id; + var annotationsUrl = ODFGlobals.annotationsUrl + "?assetReference=" + encodeURIComponent(refid); + return $.ajax({ + url: annotationsUrl, + type: 'GET', + success: function(annotationData) { + asset.annotations = 
annotationData.annotations; + if (successCallback) { + successCallback(asset); + } + }, + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }).then(function(annotationData) { + asset.annotations = annotationData.annotations; + return asset; + }); + }, + + loadAtlasAsset : function(asset, successCallback, errorCallback){ + var url = ODFGlobals.metadataUrl + "/asset/" + encodeURIComponent(JSON.stringify({repositoryId: asset.repositoryId, id: asset.id})); + return $.ajax({ + url: url, + type: 'GET', + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }).then( function(data) { + var refid = data.reference.id; + var annotationsUrl = ODFGlobals.annotationsUrl + "?assetReference=" + encodeURIComponent(refid); + return $.ajax({ + url: annotationsUrl, + type: 'GET', + success: function(annotationData) { + data.annotations = annotationData.annotations; + if (successCallback) { + successCallback(data); + } + }, + error: function(xhr, status, err) { + if(errorCallback){ + errorCallback(err); + } + } + }).then(function(annotationData) { + data.annotations = annotationData.annotations; + return data; + }); + }); + }, + + searchAtlasMetadata : function(query, successCallback, errorCallback) { + var url = ODFGlobals.metadataUrl + "/search?" + $.param({query: query}); + var req = $.ajax({ + url: url, + dataType: 'json', + type: 'GET', + success: function(data) { + successCallback(data); + }, + error: function(xhr, status, err) { + console.error(url, status, err.toString()); + var msg = "Error while loading recent analysis requests: " + err.toString(); + errorCallback(msg); + } + }); + return req; + } + }, + + MetadataStore : { + + getProperties(successCallback, alertCallback) { + if (alertCallback) { + alertCallback({type: ""}); + } + return $.ajax({ + url: ODFGlobals.metadataUrl, + dataType: 'json', + type: 'GET', + success: successCallback, + error: function(xhr, status, err) { + if (alertCallback) { + var msg = "Error while reading metadata store properties: " + err.toString(); + alertCallback({type: "danger", message: msg}); + } + } + }); + } + }, + + ConfigurationStore : { + + // readUserDefinedProperties(successCallback, alertCallback) { + readConfig(successCallback, alertCallback) { + if (alertCallback) { + alertCallback({type: ""}); + } + // clear alert + + return $.ajax({ + url: ODFGlobals.apiPrefix + "settings", + dataType: 'json', + type: 'GET', + success: successCallback, + error: function(xhr, status, err) { + if (alertCallback) { + var msg = "Error while reading user defined properties: " + err.toString(); + alertCallback({type: "danger", message: msg}); + } + } + }); + }, + + updateConfig(config, successCallback, alertCallback) { + if (alertCallback) { + alertCallback({type: ""}); + } + + return $.ajax({ + url: ODFGlobals.apiPrefix + "settings", + contentType: "application/json", + dataType: 'json', + type: 'PUT', + data: JSON.stringify(config), + success: successCallback, + error: function(xhr, status, err) { + if (alertCallback) { + var msg = "Error while reading user defined properties: " + err.toString(); + alertCallback({type: "danger", message: msg}); + } + } + }); + } + }, + + ServicesStore : { + + // readUserDefinedProperties(successCallback, alertCallback) { + getServices(successCallback, alertCallback) { + if (alertCallback) { + alertCallback({type: ""}); + } + // clear alert + + return $.ajax({ + url: ODFGlobals.apiPrefix + "services", + dataType: 'json', + type: 'GET', + success: successCallback, + 
error: function(xhr, status, err) { + if (alertCallback) { + var msg = "Error while getting list of ODF services: " + err.toString(); + alertCallback({type: "danger", message: msg}); + } + } + }); + } + }, + + URLHelper : { + + getBaseHash : function(){ + var baseHash = "#" + document.location.hash.split("#")[1]; + var split = baseHash.split("/"); + if(split.length>0){ + return split[0]; + } + return ""; + }, + + setUrlHash : function(newAddition){ + if(!newAddition){ + newAddition = ""; + } + if(newAddition != "" && typeof newAddition === "object"){ + newAddition = JSON.stringify(newAddition); + } + var hash = document.location.hash; + var baseHash = this.getBaseHash(); + if(!hash.startsWith(baseHash)){ + return; + } + document.location.hash = baseHash + "/" + encodeURIComponent(newAddition); + } + } +}; + +module.exports = Utils; http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/main/webapp/swagger/index.html ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/main/webapp/swagger/index.html b/odf/odf-web/src/main/webapp/swagger/index.html new file mode 100755 index 0000000..4eb6ff1 --- /dev/null +++ b/odf/odf-web/src/main/webapp/swagger/index.html @@ -0,0 +1,142 @@ + + + + + + + Swagger UI + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java new file mode 100755 index 0000000..6f23c0d --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/EngineResourceTest.java @@ -0,0 +1,79 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.integrationtest.admin; + +import java.io.InputStream; +import java.util.Collection; + +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.client.fluent.Executor; +import org.apache.http.client.fluent.Request; +import org.apache.http.client.fluent.Response; +import org.junit.Assert; +import org.junit.Test; + +import org.apache.atlas.odf.api.engine.ServiceRuntimeInfo; +import org.apache.atlas.odf.api.engine.ServiceRuntimesInfo; +import org.apache.atlas.odf.api.engine.SystemHealth; +import org.apache.atlas.odf.core.Utils; +import org.apache.atlas.odf.json.JSONUtils; + +public class EngineResourceTest extends RestTestBase { + + @Test + public void testHealth() throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/health"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + logger.info("Health check request returned: " + s); + checkResult(httpResp, HttpStatus.SC_OK); + SystemHealth health = JSONUtils.fromJSON(s, SystemHealth.class); + Assert.assertNotNull(health); + } + + boolean containsRuntimeWithName(Collection runtimes, String name) { + for (ServiceRuntimeInfo sri : runtimes) { + if (name.equals(sri.getName())) { + return true; + } + } + return false; + } + + @Test + public void testRuntimesInfo() throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/runtimes"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + logger.info("Runtime Info returned: " + s); + checkResult(httpResp, HttpStatus.SC_OK); + ServiceRuntimesInfo sri = JSONUtils.fromJSON(s, ServiceRuntimesInfo.class); + Assert.assertNotNull(sri); + Assert.assertTrue(sri.getRuntimes().size() > 2); + Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), "Java")); + Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), 
"Spark")); + Assert.assertTrue(containsRuntimeWithName(sri.getRuntimes(), "HealthCheck")); + + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java new file mode 100755 index 0000000..d093a73 --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/admin/SettingsResourceTest.java @@ -0,0 +1,97 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.integrationtest.admin; + +import org.apache.atlas.odf.api.settings.MessagingConfiguration; +import org.apache.atlas.odf.api.settings.ODFSettings; +import org.apache.atlas.odf.core.Encryption; +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.apache.http.HttpStatus; +import org.junit.Assert; +import org.junit.Test; + +import org.apache.atlas.odf.api.settings.KafkaMessagingConfiguration; +import org.apache.atlas.odf.json.JSONUtils; + +public class SettingsResourceTest extends RestTestBase { + + @Test + public void testSettingsRead() throws Exception { + ODFSettings settings = settingsRead(); + Assert.assertNotNull(settings); + MessagingConfiguration msgConfig = settings.getMessagingConfiguration(); + Assert.assertNotNull(msgConfig); + Assert.assertTrue(msgConfig instanceof KafkaMessagingConfiguration); + KafkaMessagingConfiguration kafkaMsgConfig = (KafkaMessagingConfiguration) msgConfig; + Assert.assertNotNull(kafkaMsgConfig.getKafkaConsumerConfig()); + Assert.assertNotNull(kafkaMsgConfig.getKafkaConsumerConfig().getZookeeperConnectionTimeoutMs()); + + Assert.assertNotNull(settings.getUserDefined()); + } + + @Test + public void testPasswordEncryption() throws Exception { + ODFSettings settings = settingsRead(); + settings.setOdfPassword("newOdfPassword"); + ODFSettings configWithPlainPasswords = settings; + settingsWrite(JSONUtils.toJSON(configWithPlainPasswords), HttpStatus.SC_OK); + logger.info("Settings with plain password: " + JSONUtils.toJSON(configWithPlainPasswords)); + + // REST API must return hidden password + ODFSettings configWithHiddenPasswords = settingsRead(); + String hiddenPasswordIdentifyier = "***hidden***"; + Assert.assertEquals(configWithHiddenPasswords.getOdfPassword(), hiddenPasswordIdentifyier); + + // Reset passwords + Assert.assertNotNull(System.getProperty("odf.test.password")); + settings = settingsRead(); + settings.setOdfPassword(Encryption.decryptText(System.getProperty("odf.test.password"))); + settingsWrite(JSONUtils.toJSON(settings), HttpStatus.SC_OK); + } + + @Test + public void testSettingsWriteSuccess() throws Exception { + String configSnippet = "{ \"runAnalysisOnImport\": false }"; + logger.info("Testing write settings success with JSON: " + 
configSnippet);
+		settingsWrite(configSnippet, HttpStatus.SC_OK);
+	}
+
+	@Test
+	public void testSettingsWriteFailure() throws Exception {
+		String configSnippet = "{ \"runAnalysisOnImport\": \"someInvalidValue\" }";
+		logger.info("Testing write settings failure with JSON: " + configSnippet);
+		settingsWrite(configSnippet, HttpStatus.SC_INTERNAL_SERVER_ERROR);
+	}
+
+	@Test
+	public void testSettingsReset() throws Exception {
+		logger.info("Testing reset settings operation.");
+		String updatedId = "updatedInstanceId";
+		ODFSettings originalConfig = settingsRead();
+		String originalInstanceId = originalConfig.getInstanceId();
+		originalConfig.setInstanceId(updatedId);
+
+		settingsWrite(JSONUtils.toJSON(originalConfig), HttpStatus.SC_OK);
+
+		ODFSettings newConfig = settingsRead();
+		Assert.assertEquals(updatedId, newConfig.getInstanceId());
+
+		settingsReset();
+
+		ODFSettings resetConfig = settingsRead();
+		String resetInstanceId = resetConfig.getInstanceId();
+
+		Assert.assertEquals(originalInstanceId, resetInstanceId);
+	}
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
new file mode 100755
index 0000000..21b7887
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/analysis/test/ODFVersionTest.java
@@ -0,0 +1,47 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.atlas.odf.integrationtest.analysis.test;
+
+import java.io.InputStream;
+
+import org.apache.atlas.odf.core.Utils;
+import org.apache.atlas.odf.rest.test.RestTestBase;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.fluent.Executor;
+import org.apache.http.client.fluent.Request;
+import org.apache.http.client.fluent.Response;
+import org.apache.wink.json4j.JSONObject;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class ODFVersionTest extends RestTestBase {
+
+	@Test
+	public void testVersion() throws Exception {
+		Executor exec = getRestClientManager().getAuthenticatedExecutor();
+		Request req = Request.Get(RestTestBase.getBaseURI() + "/engine/version");
+		Response resp = exec.execute(req);
+		HttpResponse httpResp = resp.returnResponse();
+		InputStream is = httpResp.getEntity().getContent();
+
+		String s = Utils.getInputStreamAsString(is, "UTF-8");
+		logger.info("Version request returned: " + s);
+
+		JSONObject jo = new JSONObject(s);
+		String version = jo.getString("version");
+		Assert.assertNotNull(version);
+		Assert.assertTrue(version.startsWith("1.2.0-"));
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
----------------------------------------------------------------------
diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
new file mode 100755
index 0000000..4900d63
--- /dev/null
+++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/annotations/AnnotationsResourceTest.java
@@ -0,0 +1,174 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package org.apache.atlas.odf.integrationtest.annotations; + +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; +import java.util.logging.Logger; + +import org.apache.atlas.odf.api.annotation.Annotations; +import org.apache.atlas.odf.api.metadata.models.Annotation; +import org.apache.atlas.odf.api.metadata.models.DataFile; +import org.apache.atlas.odf.integrationtest.metadata.MetadataResourceTest; +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.client.fluent.Executor; +import org.apache.http.client.fluent.Request; +import org.apache.http.client.fluent.Response; +import org.apache.http.entity.ContentType; +import org.apache.wink.json4j.JSON; +import org.apache.wink.json4j.JSONObject; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import org.apache.atlas.odf.api.metadata.MetaDataObjectReference; +import org.apache.atlas.odf.api.metadata.models.ProfilingAnnotation; +import org.apache.atlas.odf.json.JSONUtils; + +public class AnnotationsResourceTest extends RestTestBase { + Logger logger = Logger.getLogger(AnnotationsResourceTest.class.getName()); + + @Before + public void createSampleData() throws Exception { + Executor exec = getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(getBaseURI() + "/metadata/sampledata"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + } + + public static class AnnotationsResourceTestProfilingAnnotation extends ProfilingAnnotation { + private String newAnnotProp; + + public String getNewAnnotProp() { + return newAnnotProp; + } + + public void setNewAnnotProp(String newAnnotProp) { + this.newAnnotProp = newAnnotProp; + } + + } + + static String newAnnotPropValue = "newAnnotPropValue" + UUID.randomUUID().toString(); + static String newAnnotPropKey = "newAnnotProp"; + + static String unknownAnnotType = "UnknownAnnotType" + UUID.randomUUID().toString(); + + List createTestAnnotations(MetaDataObjectReference ref, String reqId) { + List result = new ArrayList<>(); + AnnotationsResourceTestProfilingAnnotation annot = new AnnotationsResourceTestProfilingAnnotation(); + annot.setProfiledObject(ref); + annot.setNewAnnotProp(newAnnotPropValue); + annot.setAnalysisRun(reqId); + result.add(annot); + + ProfilingAnnotation genericAnnot = new ProfilingAnnotation(); + genericAnnot.setProfiledObject(ref); + genericAnnot.setAnalysisRun(reqId); + genericAnnot.setJsonProperties("{\"" + newAnnotPropKey + "\": \"" + newAnnotPropValue + "\"}"); + result.add(genericAnnot); + + return result; + } + + MetaDataObjectReference getTestDataSetRef() throws Exception { + String s = MetadataResourceTest.getAllMetadataObjectsOfType("DataFile"); + logger.info("Retrieved test data set refs: " + s); + List dfRefs = JSONUtils.fromJSONList(s, DataFile.class); + return dfRefs.get(0).getReference(); + } + + @Test + public void testAnnotationStore() throws Exception { + MetaDataObjectReference dfRef = getTestDataSetRef(); + String reqId = "TestRequestId" + UUID.randomUUID().toString(); + logger.info("Test Annotatoin store with request ID: " + reqId); + List newAnnots = createTestAnnotations(dfRef, reqId); + + Executor exec = getRestClientManager().getAuthenticatedExecutor(); + List createdAnnotIds = new ArrayList<>(); + // create annotations + for (Annotation annot : newAnnots) { + String restRequestBody = 
JSONUtils.toJSON(annot); + logger.info("Creating annotation via request " + restRequestBody); + Request req = Request.Post(getBaseURI() + "/annotations").bodyString(restRequestBody, ContentType.APPLICATION_JSON); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_CREATED); + InputStream is = httpResp.getEntity().getContent(); + MetaDataObjectReference createdAnnot = JSONUtils.fromJSON(is, MetaDataObjectReference.class); + Assert.assertNotNull(createdAnnot); + Assert.assertNotNull(createdAnnot.getId()); + createdAnnotIds.add(createdAnnot.getId()); + } + logger.info("Annotations created, now retrieving them again: " + createdAnnotIds); + + // check retrieval + Request req = Request.Get(getBaseURI() + "/annotations?assetReference=" + dfRef.getId()); + Response resp = exec.execute(req); + + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + Annotations retrieveResult = JSONUtils.fromJSON(httpResp.getEntity().getContent(), Annotations.class); + List retrievedAnnots = retrieveResult.getAnnotations(); + logger.info("Retrieved annotations: " + retrievedAnnots); + int foundAnnots = 0; + for (Annotation retrievedAnnot : retrievedAnnots) { + logger.info("Checking annotation: " + retrievedAnnot.getReference()); + logger.info("Annotation " + retrievedAnnot.getReference().getId() + " has request ID: " + retrievedAnnot.getAnalysisRun()); + if (reqId.equals(retrievedAnnot.getAnalysisRun())) { + logger.info("Checking annotation " + retrievedAnnot + " of class " + retrievedAnnot.getClass()); + Assert.assertTrue(retrievedAnnot instanceof ProfilingAnnotation); + + if (retrievedAnnot instanceof AnnotationsResourceTestProfilingAnnotation) { + AnnotationsResourceTestProfilingAnnotation tpa = (AnnotationsResourceTestProfilingAnnotation) retrievedAnnot; + Assert.assertEquals(dfRef, tpa.getProfiledObject()); + Assert.assertEquals(newAnnotPropValue, tpa.getNewAnnotProp()); + } else { + // other annotations are "unknown", thus no subclass of ProfilingAnnotation + Assert.assertTrue(retrievedAnnot.getClass().equals(ProfilingAnnotation.class)); + + String jsonProps = retrievedAnnot.getJsonProperties(); + Assert.assertNotNull(jsonProps); + JSONObject jo = (JSONObject) JSON.parse(jsonProps); + Assert.assertTrue(jo.containsKey(newAnnotPropKey)); + Assert.assertEquals(newAnnotPropValue, jo.getString(newAnnotPropKey)); + } + Assert.assertTrue(createdAnnotIds.contains(retrievedAnnot.getReference().getId())); + foundAnnots++; + + // check that retrieval by Id works + logger.info("Retrieving annotation " + retrievedAnnot.getReference().getId() + " again"); + String url = getBaseURI() + "/annotations/objects/" + retrievedAnnot.getReference().getId(); + logger.info("Retriveing annotation with URL: " + url); + Request req1 = Request.Get(url); + Response resp1 = exec.execute(req1); + + HttpResponse httpResp1 = resp1.returnResponse(); + checkResult(httpResp1, HttpStatus.SC_OK); + Annotation newRetrievedAnnot = JSONUtils.fromJSON(httpResp1.getEntity().getContent(), Annotation.class); + Assert.assertEquals(retrievedAnnot.getReference(), newRetrievedAnnot.getReference()); + Assert.assertEquals(retrievedAnnot.getClass(), newRetrievedAnnot.getClass()); + Assert.assertEquals(retrievedAnnot.getJsonProperties(), newRetrievedAnnot.getJsonProperties()); + } + } + Assert.assertEquals(createdAnnotIds.size(), foundAnnots); + + } +} 
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java new file mode 100755 index 0000000..d76a272 --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/MetadataResourceTest.java @@ -0,0 +1,81 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.integrationtest.metadata; + +import java.io.InputStream; +import java.util.logging.Logger; + +import org.apache.atlas.odf.api.metadata.MetadataStore; +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.client.fluent.Executor; +import org.apache.http.client.fluent.Request; +import org.apache.http.client.fluent.Response; +import org.apache.http.client.utils.URIBuilder; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import org.apache.atlas.odf.api.ODFFactory; +import org.apache.atlas.odf.api.metadata.models.BusinessTerm; +import org.apache.atlas.odf.api.metadata.models.DataFile; +import org.apache.atlas.odf.json.JSONUtils; + +public class MetadataResourceTest extends RestTestBase { + + static Logger logger = Logger.getLogger(MetadataResourceTest.class.getName()); + + @Before + public void createSampleData() throws Exception { + Executor exec = getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(getBaseURI() + "/metadata/sampledata"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + } + + public static String getAllMetadataObjectsOfType(String dataType) throws Exception { + MetadataStore mdsForQueryGeneration = new ODFFactory().create().getMetadataStore(); + String query = mdsForQueryGeneration.newQueryBuilder().objectType(dataType).build(); + logger.info("Metadata search query metadata " + query); + + URIBuilder builder = new URIBuilder(getBaseURI() + "/metadata/search").addParameter("query", query); + String uri = builder.build().toString(); + logger.info("Searching against URL: " + uri); + Request req = Request.Get(uri); + Response response = getRestClientManager().getAuthenticatedExecutor().execute(req); + HttpResponse httpResp = response.returnResponse(); + Assert.assertEquals(HttpStatus.SC_OK, httpResp.getStatusLine().getStatusCode()); + InputStream is = httpResp.getEntity().getContent(); + String s = JSONUtils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Response: " + s); + return s; + } + + @Test + public void testMetadataResourceSearchOMDataFile() throws Exception { + String s = getAllMetadataObjectsOfType("DataFile"); + 
Assert.assertTrue(s.contains("DataFile")); // minimal checking that JSON contains something useful and specific to this type + JSONUtils.fromJSONList(s, DataFile.class); + } + + @Test + public void testMetadataResourceSearchOMBusinessTerm() throws Exception { + String s = getAllMetadataObjectsOfType("BusinessTerm"); + Assert.assertTrue(s.contains("BusinessTerm")); // minimal checking that JSON contains something useful and specific to this type + JSONUtils.fromJSONList(s, BusinessTerm.class); + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java new file mode 100755 index 0000000..c70c500 --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/metadata/RemoteMetadataStoreTest.java @@ -0,0 +1,97 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.integrationtest.metadata; + +import java.net.URISyntaxException; +import java.util.Properties; + +import org.apache.atlas.odf.core.Encryption; +import org.apache.atlas.odf.core.integrationtest.metadata.MetadataStoreTestBase; +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import org.apache.atlas.odf.api.metadata.MetaDataObjectReference; +import org.apache.atlas.odf.api.metadata.MetadataQueryBuilder; +import org.apache.atlas.odf.api.metadata.MetadataStore; +import org.apache.atlas.odf.api.metadata.MetadataStoreException; +import org.apache.atlas.odf.api.metadata.RemoteMetadataStore; + +public class RemoteMetadataStoreTest extends MetadataStoreTestBase { + + protected MetadataStore getMetadataStore() { + RemoteMetadataStore rms = null; + try { + rms = new RemoteMetadataStore(RestTestBase.getOdfBaseUrl(), RestTestBase.getOdfUser(), Encryption.decryptText(RestTestBase.getOdfPassword()), true); + } catch (MetadataStoreException | URISyntaxException e) { + throw new RuntimeException("Error connecting to remote metadata store,", e); + } + return rms; + } + + //TODO: Remove all methods below this comment once the DefaultMetadataStore is queue-based (issue #122) + // RemoteMetadataStore will then use the exact same test cases as the other (writable) metadata stores + + @Before + public void createSampleData() { + //TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122) + MetadataStore mds = getMetadataStore(); + mds.resetAllData(); + mds.createSampleData(); + } + + @Test + public void testProperties() throws Exception { + //TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122) + RemoteMetadataStore rms = new 
RemoteMetadataStore(RestTestBase.getOdfBaseUrl(), RestTestBase.getOdfUser(), Encryption.decryptText(RestTestBase.getOdfPassword()), true); + Properties props = rms.getProperties(); + Assert.assertNotNull(props); + Assert.assertTrue(!props.isEmpty()); + } + + @Test + public void testReferences() throws Exception { + //TODO: Do not overwrite original method once DefaultMetadataStore is queue-based + MetadataStore mds = getMetadataStore(); + MetadataStoreTestBase.checkReferences(mds, MetadataStoreTestBase.getDataFileTestObject(mds)); + } + + @Test + public void testSearchAndRetrieve() { + //TODO: Do not overwrite original method once DefaultMetadataStore is queue-based + + // Test retrieve + MetadataStore mds = getMetadataStore(); + MetaDataObjectReference bankClientsShortRef = mds.search(mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build()).get(0); + Assert.assertEquals("The metadata store did not retrieve the object with the expected name.", "BankClientsShort", mds.retrieve(bankClientsShortRef).getName()); + + // Test queries with conditions + checkQueryResults(mds, new String[] { "BankClientsShort" }, mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build(), false); + checkQueryResults(mds, new String[] { "SimpleExampleTable" }, mds.newQueryBuilder().objectType("DataFile").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.NOT_EQUALS, "BankClientsShort").build(), false); + checkQueryResults(mds, new String[] { "NAME" }, + mds.newQueryBuilder().objectType("Column").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "NAME").simpleCondition("dataType", MetadataQueryBuilder.COMPARATOR.EQUALS, "string").build(), false); + + // Test type hierarchy + checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable" }, mds.newQueryBuilder().objectType("DataFile").build(), true); + checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable" }, mds.newQueryBuilder().objectType("RelationalDataSet").build(), true); + checkQueryResults(mds, new String[] { "BankClientsShort", "SimpleExampleTable", "Simple URL example document", "Simple local example document" }, mds.newQueryBuilder().objectType("DataSet").build(), false); + checkQueryResults(mds, new String[] { "BankClientsShort" }, mds.newQueryBuilder().objectType("MetaDataObject").simpleCondition("name", MetadataQueryBuilder.COMPARATOR.EQUALS, "BankClientsShort").build(), false); + } + + @Test + public void testAnnotations() { + //TODO: Remove this method once the DefaultMetadataStore is queue-based (issue #122) + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java new file mode 100755 index 0000000..d7bbc0f --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/integrationtest/spark/SparkDiscoveryServiceWebTest.java @@ -0,0 +1,133 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.integrationtest.spark; + +import java.util.List; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.apache.atlas.odf.api.analysis.AnalysisRequest; +import org.apache.atlas.odf.api.metadata.MetadataStore; +import org.apache.atlas.odf.api.metadata.models.Annotation; +import org.apache.atlas.odf.api.metadata.models.RelationalDataSet; +import org.apache.atlas.odf.api.settings.ODFSettings; +import org.apache.atlas.odf.rest.test.RestTestBase; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.client.fluent.Executor; +import org.apache.http.client.fluent.Request; +import org.apache.http.client.fluent.Response; +import org.apache.wink.json4j.JSONException; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import org.apache.atlas.odf.api.metadata.RemoteMetadataStore; +import org.apache.atlas.odf.core.Encryption; +import org.apache.atlas.odf.api.ODFFactory; +import org.apache.atlas.odf.core.Utils; +import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus.State; +import org.apache.atlas.odf.api.annotation.AnnotationStore; +import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceEndpoint; +import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceProperties; +import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceSparkEndpoint; +import org.apache.atlas.odf.api.discoveryservice.DiscoveryServiceSparkEndpoint.SERVICE_INTERFACE_TYPE; +import org.apache.atlas.odf.core.integrationtest.metadata.internal.spark.SparkDiscoveryServiceLocalTest; +import org.apache.atlas.odf.core.integrationtest.metadata.internal.spark.SparkDiscoveryServiceLocalTest.DATASET_TYPE; +import org.apache.atlas.odf.api.settings.SparkConfig; +import org.apache.atlas.odf.json.JSONUtils; + +public class SparkDiscoveryServiceWebTest extends RestTestBase { + protected static Logger logger = Logger.getLogger(SparkDiscoveryServiceWebTest.class.getName()); + + @Before + public void createSampleData() throws Exception { + Executor exec = getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(getBaseURI() + "/metadata/sampledata"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + } + + public static DiscoveryServiceProperties getSparkSummaryStatisticsService() throws JSONException { + DiscoveryServiceProperties dsProperties = new DiscoveryServiceProperties(); + dsProperties.setId(SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID); + dsProperties.setName("Spark summary statistics service"); + dsProperties.setDescription("Example discovery service calling summary statistics Spark application"); + dsProperties.setIconUrl("spark.png"); + dsProperties.setLink("http://www.spark.apache.org"); + dsProperties.setPrerequisiteAnnotationTypes(null); + dsProperties.setResultingAnnotationTypes(null); + dsProperties.setSupportedObjectTypes(null); + dsProperties.setAssignedObjectTypes(null); + dsProperties.setAssignedObjectCandidates(null); + 
dsProperties.setParallelismCount(2); + DiscoveryServiceSparkEndpoint endpoint = new DiscoveryServiceSparkEndpoint(); + endpoint.setJar("file:///tmp/odf-spark/odf-spark-example-application-1.2.0-SNAPSHOT.jar"); + endpoint.setInputMethod(SERVICE_INTERFACE_TYPE.DataFrame); + endpoint.setClassName("org.apache.atlas.odf.core.spark.SummaryStatistics"); + dsProperties.setEndpoint(JSONUtils.convert(endpoint, DiscoveryServiceEndpoint.class)); + return dsProperties; + } + + public void runSparkServiceTest(SparkConfig sparkConfig, DATASET_TYPE dataSetType, DiscoveryServiceProperties regInfo, String[] annotationNames) throws Exception{ + logger.log(Level.INFO, "Testing spark application on ODF webapp url {0}.", getOdfBaseUrl()); + + logger.info("Using Spark configuration: " + JSONUtils.toJSON(sparkConfig)); + ODFSettings settings = settingsRead(); + settings.setSparkConfig(sparkConfig); + settings.setOdfUrl(Utils.getSystemPropertyExceptionIfMissing("odf.test.webapp.url")); + settingsWrite(JSONUtils.toJSON(settings), HttpStatus.SC_OK); + + logger.log(Level.INFO, "Trying to delete existing discovery service: " + SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID); + deleteService(SparkDiscoveryServiceLocalTest.DISCOVERY_SERVICE_ID); + + logger.info("Using discovery service: " + JSONUtils.toJSON(regInfo)); + createService(JSONUtils.toJSON(regInfo), HttpStatus.SC_OK); + + checkServiceExists(regInfo.getId()); + + MetadataStore mds = new RemoteMetadataStore(getOdfBaseUrl(), getOdfUser(), Encryption.decryptText(getOdfPassword()), true); + Assert.assertNotNull(mds); + + + RelationalDataSet dataSet = null; + if (dataSetType == DATASET_TYPE.FILE) { + dataSet = SparkDiscoveryServiceLocalTest.getTestDataFile(mds); + } else if (dataSetType == DATASET_TYPE.TABLE) { + dataSet = SparkDiscoveryServiceLocalTest.getTestTable(mds); + } else { + Assert.assertTrue(false); + } + logger.info("Using dataset: " + JSONUtils.toJSON(dataSet)); + + AnnotationStore as = new ODFFactory().create().getAnnotationStore(); + + AnalysisRequest request = SparkDiscoveryServiceLocalTest.getSparkAnalysisRequest(dataSet); + logger.info("Using analysis request: " + JSONUtils.toJSON(request)); + + logger.info("Starting analysis..."); + String requestId = runAnalysis(request, State.FINISHED); + + List annots = as.getAnnotations(null, requestId); + logger.info("Number of annotations created: " + annots.size()); + Assert.assertTrue("No annotations have been created.", annots.size() > 0); + } + + @Test + public void testSparkServiceRESTAPI() throws Exception{ + runSparkServiceTest(SparkDiscoveryServiceLocalTest.getLocalSparkConfig(), DATASET_TYPE.FILE, getSparkSummaryStatisticsService(), new String[] { "SparkSummaryStatisticsAnnotation", "SparkTableAnnotation" }); + } + +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java ---------------------------------------------------------------------- diff --git a/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java b/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java new file mode 100755 index 0000000..e23dd4e --- /dev/null +++ b/odf/odf-web/src/test/java/org/apache/atlas/odf/rest/test/RestTestBase.java @@ -0,0 +1,289 @@ +/** + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.atlas.odf.rest.test; + +import java.io.InputStream; +import java.net.URI; +import java.text.MessageFormat; +import java.util.logging.Level; +import java.util.logging.Logger; + +import org.apache.http.Header; +import org.apache.http.HttpResponse; +import org.apache.http.HttpStatus; +import org.apache.http.StatusLine; +import org.apache.http.client.fluent.Executor; +import org.apache.http.client.fluent.Request; +import org.apache.http.client.fluent.Response; +import org.apache.http.client.utils.URIBuilder; +import org.apache.http.entity.ContentType; +import org.apache.http.message.BasicHeader; +import org.junit.Assert; +import org.junit.BeforeClass; + +import org.apache.atlas.odf.core.Encryption; +import org.apache.atlas.odf.core.Utils; +import org.apache.atlas.odf.api.analysis.AnalysisRequestTrackers; +import org.apache.atlas.odf.api.analysis.AnalysisRequest; +import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus; +import org.apache.atlas.odf.api.analysis.AnalysisRequestSummary; +import org.apache.atlas.odf.api.analysis.AnalysisResponse; +import org.apache.atlas.odf.api.annotation.Annotations; +import org.apache.atlas.odf.api.analysis.AnalysisRequestStatus.State; +import org.apache.atlas.odf.api.connectivity.RESTClientManager; +import org.apache.atlas.odf.api.settings.ODFSettings; +import org.apache.atlas.odf.api.utils.ODFLogConfig; +import org.apache.atlas.odf.core.test.TestEnvironment; +import org.apache.atlas.odf.json.JSONUtils; + +public class RestTestBase { + + protected static Logger logger = Logger.getLogger(RestTestBase.class.getName()); + + @BeforeClass + public static void setup() throws Exception { + ODFLogConfig.run(); + TestEnvironment.startMessaging(); + } + + protected static void checkResult(HttpResponse httpResponse, int expectedCode) { + StatusLine sl = httpResponse.getStatusLine(); + int code = sl.getStatusCode(); + logger.info("Http request returned: " + code + ", message: " + sl.getReasonPhrase()); + Assert.assertEquals(expectedCode, code); + } + + public static RESTClientManager getRestClientManager() { + return new RESTClientManager(URI.create(getOdfUrl()), getOdfUser(), Encryption.decryptText(getOdfPassword())); + } + + public static String getOdfBaseUrl() { + String odfBaseURL = System.getProperty("odf.test.base.url"); + return odfBaseURL; + } + + public static String getOdfUrl() { + String odfURL = System.getProperty("odf.test.webapp.url"); + return odfURL; + } + + public static String getOdfUser() { + String odfUser = System.getProperty("odf.test.user"); + return odfUser; + } + + public static String getOdfPassword() { + String odfPassword = System.getProperty("odf.test.password"); + return odfPassword; + } + + public static String getBaseURI() { + return getOdfBaseUrl() + "/odf/api/v1"; + } + + public String runAnalysis(AnalysisRequest request, State expectedState) throws Exception { + Executor exec = getRestClientManager().getAuthenticatedExecutor(); + String json = JSONUtils.toJSON(request); + logger.info("Starting analysis via POST request: " + json); + + Header header = new BasicHeader("Content-Type", 
"application/json"); + Request req = Request.Post(getBaseURI() + "/analyses").bodyString(json, ContentType.APPLICATION_JSON).addHeader(header); + + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + + InputStream is = httpResp.getEntity().getContent(); + String jsonResponse = JSONUtils.getInputStreamAsString(is, "UTF-8"); + logger.info("Analysis response: " + jsonResponse); + AnalysisResponse analysisResponse = JSONUtils.fromJSON(jsonResponse, AnalysisResponse.class); + Assert.assertNotNull(analysisResponse); + String requestId = analysisResponse.getId(); + Assert.assertNotNull(requestId); + logger.info("Request Id: " + requestId); + + Assert.assertTrue(! analysisResponse.isInvalidRequest()); + + AnalysisRequestStatus status = null; + int maxPolls = 400; + do { + Request statusRequest = Request.Get(getBaseURI() + "/analyses/" + requestId); + logger.info("Getting analysis status"); + resp = exec.execute(statusRequest); + httpResp = resp.returnResponse(); + checkResult(httpResp, HttpStatus.SC_OK); + + String statusResponse = JSONUtils.getInputStreamAsString(httpResp.getEntity().getContent(), "UTF-8"); + logger.info("Analysis status: " + statusResponse); + status = JSONUtils.fromJSON(statusResponse, AnalysisRequestStatus.class); + + logger.log(Level.INFO, "Poll request for request ID ''{0}'' (expected state: ''{1}'', details: ''{2}''", new Object[] { requestId, status.getState(), status.getDetails(), State.FINISHED }); + maxPolls--; + Thread.sleep(1000); + } while (maxPolls > 0 && (status.getState() == State.ACTIVE || status.getState() == State.QUEUED)); + Assert.assertEquals(State.FINISHED, status.getState()); + return requestId; + } + + public void createService(String serviceJSON, int expectedCode) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", "application/json"); + + Request req = Request.Post(RestTestBase.getBaseURI() + "/services")// + .bodyString(serviceJSON, ContentType.APPLICATION_JSON) // + .addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Create service request return code: " + httpResp.getStatusLine().getStatusCode() + ", content: " + s); + checkResult(httpResp, expectedCode); + } + + public void checkServiceExists(String serviceId) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", "application/json"); + + Request req = Request.Get(RestTestBase.getBaseURI() + "/services/" + serviceId).addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Get service request return code: " + httpResp.getStatusLine().getStatusCode() + ", content: " + s); + checkResult(httpResp, 200); + + } + + public void deleteService(String serviceId, int expectedCode) throws Exception { + checkResult(this.deleteService(serviceId), expectedCode); + } + + public HttpResponse deleteService(String serviceId) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", 
"application/json"); + URIBuilder uri = new URIBuilder(RestTestBase.getBaseURI() + "/services/" + serviceId + "/cancel"); + Request req = Request.Post(uri.build())// + .addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Delete service request returned: " + s); + return httpResp; + } + + public ODFSettings settingsRead() throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(RestTestBase.getBaseURI() + "/settings"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + logger.info("Settings read request returned: " + s); + is.close(); + checkResult(httpResp, HttpStatus.SC_OK); + return JSONUtils.fromJSON(s, ODFSettings.class); + } + + public void settingsWrite(String configSnippet, int expectedCode) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", "application/json"); + + Request req = Request.Put(RestTestBase.getBaseURI() + "/settings")// + .bodyString(configSnippet, ContentType.APPLICATION_JSON) // + .addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Settings write request returned: " + s); + checkResult(httpResp, expectedCode); + } + + public void settingsReset() throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", "application/json"); + Request req = Request.Post(RestTestBase.getBaseURI() + "/settings/reset")// + .addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Config reset request returned: " + s); + checkResult(httpResp, HttpStatus.SC_OK); + } + + public void cancelAnalysisRequest(String requestId, int expectedCode) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Header header = new BasicHeader("Content-Type", "application/json"); + + Request req = Request.Post(RestTestBase.getBaseURI() + "/analyses/" + requestId + "/cancel").addHeader(header); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + String s = Utils.getInputStreamAsString(is, "UTF-8"); + is.close(); + logger.info("Cancel analyses request returned: " + s); + checkResult(httpResp, expectedCode); + } + + public AnalysisRequestTrackers getAnalysesRequests(int offset, int limit) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(MessageFormat.format("{0}/analyses?offset={1}&limit={2}", RestTestBase.getBaseURI(), offset, limit)); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + 
logger.info("Analyses read request returned: " + s); + is.close(); + checkResult(httpResp, HttpStatus.SC_OK); + return JSONUtils.fromJSON(s, AnalysisRequestTrackers.class); + } + + public AnalysisRequestSummary getAnalysesStats() throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + Request req = Request.Get(RestTestBase.getBaseURI() + "/analyses/stats"); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + logger.info("Analyses statistics request returned: " + s); + is.close(); + checkResult(httpResp, HttpStatus.SC_OK); + return JSONUtils.fromJSON(s, AnalysisRequestSummary.class); + } + + public Annotations getAnnotations(String analysisRequestId) throws Exception { + Executor exec = RestTestBase.getRestClientManager().getAuthenticatedExecutor(); + URIBuilder uri = new URIBuilder(RestTestBase.getBaseURI() + "/annotations").addParameter("analysisRequestId", analysisRequestId); + Request req = Request.Get(uri.build()); + Response resp = exec.execute(req); + HttpResponse httpResp = resp.returnResponse(); + InputStream is = httpResp.getEntity().getContent(); + + String s = Utils.getInputStreamAsString(is, "UTF-8"); + logger.info("Settings read request returned: " + s); + is.close(); + checkResult(httpResp, HttpStatus.SC_OK); + return JSONUtils.fromJSON(s, Annotations.class); + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/odf-web/webpack.config.js ---------------------------------------------------------------------- diff --git a/odf/odf-web/webpack.config.js b/odf/odf-web/webpack.config.js new file mode 100755 index 0000000..380f705 --- /dev/null +++ b/odf/odf-web/webpack.config.js @@ -0,0 +1,65 @@ +/** + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+var path = require('path');
+
+const APP_ROOT="./src/main/webapp";
+const MAIN_FILE= path.resolve(APP_ROOT + "/scripts/odf-console.js");
+const CLIENT_FILE= path.resolve(APP_ROOT + "/scripts/odf-client.js");
+
+module.exports = {
+    entry: {
+        "odf-web": MAIN_FILE,
+        "odf-client": CLIENT_FILE
+    },
+
+    output: {
+        filename: "/[name].js",
+        path: path.resolve(APP_ROOT)
+    },
+
+    module: {
+        loaders: [
+            {
+                test: /\.jsx?$/,
+                loader: 'babel',
+                query: {
+                    presets: ['react', 'es2015']
+                },
+                include: /(webapp)/,
+                exclude: /(odf-web.js)/
+            },
+            {
+                test: /\.(jsx|js)$/,
+                loader: 'imports?jQuery=jquery,$=jquery,this=>window'
+            },
+            {
+                test: /\.css$/,
+                loader: 'style!css'
+            },
+            {
+                test: /\.(png|jpg)$/,
+                loader: 'url?limit=25000&name=resources/img/[hash].[ext]'
+            },
+            {
+                test: /\.woff(2)?(\?v=[0-9]\.[0-9]\.[0-9])?$/,
+                loader: 'url-loader?limit=25000&mimetype=application/font-woff&name=resources/fonts/[hash].[ext]'
+            },
+            {
+                test: /\.(ttf|eot|svg)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
+                loader: 'url?limit=25000&name=resources/fonts/[hash].[ext]'
+            }
+        ]
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/pom.xml
----------------------------------------------------------------------
diff --git a/odf/pom.xml b/odf/pom.xml
new file mode 100755
index 0000000..2e1f263
--- /dev/null
+++ b/odf/pom.xml
@@ -0,0 +1,133 @@
+
+ 4.0.0
+ odf
+ odf
+ org.apache.atlas.odf
+ 1.2.0-SNAPSHOT
+ pom
+
+ odf-api
+ odf-core
+ odf-store
+ odf-messaging
+
+ atlas
+
+ odf-atlas
+
+ complete-build
+
+ reduced-build
+ !true
+
+ odf-spark-example-application
+ odf-spark
+ odf-doc
+ odf-web
+ odf-archetype-discoveryservice
+
+ test-env
+
+ odf-test-env
+
+ UTF-8
+ UTF-8
+ localhost:2181
+ /tmp
+ ALL,${odf.test.logdir}/${project.name}-unit-trace.log
+ ALL,${odf.test.logdir}/${project.name}-integration-trace.log
+ 2.6.5
+ 58080
+ https://localhost:${jetty.maven.plugin.port}
+ https://localhost:${jetty.maven.plugin.port}/odf-web-1.2.0-SNAPSHOT
+ sdp
+ ZzTeX3hKtVORgks+2TaLPWxerucPBoxK
+ 0.7-incubating-release
+ https://localhost:21443
+ admin
+ UR0+HOiApXG9B8SNpKN5ww==
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 2.3.2
+
+ 1.7
+ 1.7
+
+ org.apache.maven.plugins
+ maven-antrun-plugin
+ 1.8
+
+ false
+ test
+
+ run
+
+ org.apache.maven.plugins
+ maven-dependency-plugin
+ 2.10
+
+ list-dependencies
+ validate
+
+ tree
+

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/odf/prepare_embedded_jetty.xml
----------------------------------------------------------------------
diff --git a/odf/prepare_embedded_jetty.xml b/odf/prepare_embedded_jetty.xml
new file mode 100755
index 0000000..c9aa044
--- /dev/null
+++ b/odf/prepare_embedded_jetty.xml
@@ -0,0 +1,90 @@

http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/6d19e129/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ebc07e2..23042e3 100644
--- a/pom.xml
+++ b/pom.xml
@@ -2009,6 +2009,11 @@
 **/policy-store.txt
 **/*rebel*.xml
 **/*rebel*.xml.bak
+
+
+ **/.gitignore
+ odf/**/*.csv
+ odf/**/*.txt
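For orientation, the short sketch below shows one way the REST helper methods added in this commit (settingsRead, settingsWrite, settingsReset, getAnalysesStats) might be driven from a JUnit test. It is an illustrative sketch only: ODFRestTestClient is a placeholder name for whatever test class actually declares these helpers, the JSON snippet passed to settingsWrite is invented for the example, and imports of the ODF model classes are omitted because their packages are not visible in this excerpt.

import org.apache.http.HttpStatus;
import org.junit.Test;

public class SettingsApiUsageSketch {

    @Test
    public void settingsRoundTrip() throws Exception {
        // Placeholder for the class that declares the helper methods shown above.
        ODFRestTestClient client = new ODFRestTestClient();

        // Read the current settings; settingsRead() asserts HTTP 200 and
        // deserializes the response body into an ODFSettings object.
        ODFSettings settings = client.settingsRead();

        // Push back an illustrative settings snippet and expect it to be accepted.
        client.settingsWrite("{\"odfUrl\": \"https://localhost:58080/odf-web-1.2.0-SNAPSHOT\"}", HttpStatus.SC_OK);

        // Query the aggregated analysis statistics endpoint.
        AnalysisRequestSummary stats = client.getAnalysesStats();

        // Restore the default configuration so later tests start from a clean state.
        client.settingsReset();
    }
}

Because the helpers already perform the HTTP status assertions via checkResult(), a test built on top of them mostly just sequences the calls it needs.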