metron-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ceste...@apache.org
Subject [1/3] incubator-metron git commit: METRON-339: Create YARN app to deploy endpoints closes apache/incubator-metron#205
Date Thu, 11 Aug 2016 21:31:08 GMT
Repository: incubator-metron
Updated Branches:
  refs/heads/master 9d0e4a020 -> e5d75e703


http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/service/yarn/YarnUtils.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/service/yarn/YarnUtils.java b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/service/yarn/YarnUtils.java
new file mode 100644
index 0000000..52e529c
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/service/yarn/YarnUtils.java
@@ -0,0 +1,145 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.maas.service.yarn;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.client.api.TimelineClient;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
+import org.apache.metron.maas.service.ApplicationMaster;
+import org.apache.metron.maas.service.ContainerEvents;
+
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.nio.ByteBuffer;
+import java.security.PrivilegedExceptionAction;
+import java.util.Iterator;
+
+public enum YarnUtils {
+  INSTANCE;
+  private static final Log LOG = LogFactory.getLog(YarnUtils.class);
+  public UserGroupInformation createUserGroup(Credentials credentials) throws IOException {
+    credentials = credentials == null? UserGroupInformation.getCurrentUser().getCredentials():credentials;
+    String appSubmitterUserName =
+            System.getenv(ApplicationConstants.Environment.USER.name());
+    UserGroupInformation appSubmitterUgi =
+            UserGroupInformation.createRemoteUser(appSubmitterUserName);
+    appSubmitterUgi.addCredentials(credentials);
+    return appSubmitterUgi;
+  }
+
+  public ByteBuffer tokensFromCredentials(Credentials credentials) throws IOException {
+    // Note: Credentials, Token, UserGroupInformation, DataOutputBuffer class
+    // are marked as LimitedPrivate
+    credentials = credentials == null? UserGroupInformation.getCurrentUser().getCredentials():credentials;
+    DataOutputBuffer dob = new DataOutputBuffer();
+    credentials.writeTokenStorageToStream(dob);
+    // Now remove the AM->RM token so that containers cannot access it.
+    Iterator<Token<?>> iter = credentials.getAllTokens().iterator();
+    LOG.info("Executing with tokens:");
+    while (iter.hasNext()) {
+      Token<?> token = iter.next();
+      LOG.info(token);
+      if (token.getKind().equals(AMRMTokenIdentifier.KIND_NAME)) {
+        iter.remove();
+      }
+    }
+    return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
+  }
+  public void publishContainerEndEvent(
+          final TimelineClient timelineClient, ContainerStatus container,
+          String domainId, UserGroupInformation ugi) {
+    final TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId(container.getContainerId().toString());
+    entity.setEntityType(ApplicationMaster.DSEntity.DS_CONTAINER.toString());
+    entity.setDomainId(domainId);
+    entity.addPrimaryFilter("user", ugi.getShortUserName());
+    TimelineEvent event = new TimelineEvent();
+    event.setTimestamp(System.currentTimeMillis());
+    event.setEventType(ContainerEvents.CONTAINER_END.toString());
+    event.addEventInfo("State", container.getState().name());
+    event.addEventInfo("Exit Status", container.getExitStatus());
+    entity.addEvent(event);
+    try {
+      timelineClient.putEntities(entity);
+    } catch (YarnException | IOException e) {
+      LOG.error("Container end event could not be published for "
+              + container.getContainerId().toString(), e);
+    }
+  }
+  public void publishApplicationAttemptEvent(
+          final TimelineClient timelineClient, String appAttemptId,
+          ContainerEvents appEvent, String domainId, UserGroupInformation ugi) {
+    final TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId(appAttemptId);
+    entity.setEntityType(ApplicationMaster.DSEntity.DS_APP_ATTEMPT.toString());
+    entity.setDomainId(domainId);
+    entity.addPrimaryFilter("user", ugi.getShortUserName());
+    TimelineEvent event = new TimelineEvent();
+    event.setEventType(appEvent.toString());
+    event.setTimestamp(System.currentTimeMillis());
+    entity.addEvent(event);
+    try {
+      timelineClient.putEntities(entity);
+    } catch (YarnException | IOException e) {
+      LOG.error("App Attempt "
+              + (appEvent.equals(ContainerEvents.APP_ATTEMPT_START) ? "start" : "end")
+              + " event could not be published for "
+              + appAttemptId.toString(), e);
+    }
+  }
+  public void publishContainerStartEvent(
+          final TimelineClient timelineClient, Container container, String domainId,
+          UserGroupInformation ugi) {
+    final TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId("" + container.getId());
+    entity.setEntityType(ApplicationMaster.DSEntity.DS_CONTAINER.toString());
+    entity.setDomainId(domainId);
+    entity.addPrimaryFilter("user", ugi.getShortUserName());
+    TimelineEvent event = new TimelineEvent();
+    event.setTimestamp(System.currentTimeMillis());
+    event.setEventType(ContainerEvents.CONTAINER_START.toString());
+    event.addEventInfo("Node", container.getNodeId().toString());
+    event.addEventInfo("Resources", container.getResource().toString());
+    entity.addEvent(event);
+
+    try {
+      ugi.doAs(new PrivilegedExceptionAction<TimelinePutResponse>() {
+        @Override
+        public TimelinePutResponse run() throws Exception {
+          return timelineClient.putEntities(entity);
+        }
+      });
+    } catch (Exception e) {
+      LOG.error("Container start event could not be published for "
+                      + container.getId().toString(),
+              e instanceof UndeclaredThrowableException ? e.getCause() : e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/submit/ModelSubmission.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/submit/ModelSubmission.java b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/submit/ModelSubmission.java
new file mode 100644
index 0000000..46961cb
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/submit/ModelSubmission.java
@@ -0,0 +1,247 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.maas.submit;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Iterables;
+import org.apache.commons.cli.*;
+import org.apache.commons.io.IOUtils;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.metron.maas.config.*;
+import org.apache.metron.maas.discovery.ServiceDiscoverer;
+import org.apache.metron.maas.util.ConfigUtil;
+import org.apache.metron.maas.queue.Queue;
+import org.apache.metron.maas.queue.ZKQueue;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.util.AbstractMap;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+
+public class ModelSubmission {
+  public enum ModelSubmissionOptions {
+    HELP("h", code -> {
+      Option o = new Option(code, "help", false, "This screen");
+      o.setRequired(false);
+      return o;
+    })
+    ,ZK_QUORUM("zq", code -> {
+      Option o = new Option(code, "zk_quorum", true, "Zookeeper Quorum");
+      o.setRequired(true);
+      return o;
+    })
+    ,ZK_ROOT("zr", code -> {
+      Option o = new Option(code, "zk_root", true, "Zookeeper Root");
+      o.setRequired(false);
+      return o;
+    })
+    ,LOCAL_MODEL_PATH("lmp", code -> {
+      Option o = new Option(code, "local_model_path", true, "Model Path (local)");
+      o.setRequired(false);
+      return o;
+    })
+    ,HDFS_MODEL_PATH("hmp", code -> {
+      Option o = new Option(code, "hdfs_model_path", true, "Model Path (HDFS)");
+      o.setRequired(false);
+      return o;
+    })
+    ,NAME("n", code -> {
+      Option o = new Option(code, "name", true, "Model Name");
+      o.setRequired(false);
+      return o;
+    })
+    ,VERSION("v", code -> {
+      Option o = new Option(code, "version", true, "Model version");
+      o.setRequired(false);
+      return o;
+    })
+    ,NUM_INSTANCES("ni", code -> {
+      Option o = new Option(code, "num_instances", true, "Number of model instances");
+      o.setRequired(false);
+      return o;
+    })
+    ,MEMORY("m", code -> {
+      Option o = new Option(code, "memory", true, "Memory for container");
+      o.setRequired(false);
+      return o;
+    })
+    ,MODE("mo", code -> {
+      Option o = new Option(code, "mode", true, "ADD, LIST or REMOVE");
+      o.setRequired(true);
+      return o;
+    })
+    ;
+    Option option;
+    String shortCode;
+    ModelSubmissionOptions(String shortCode
+                 , Function<String, Option> optionHandler
+                 ) {
+      this.shortCode = shortCode;
+      this.option = optionHandler.apply(shortCode);
+    }
+
+    public boolean has(CommandLine cli) {
+      return cli.hasOption(shortCode);
+    }
+
+    public String get(CommandLine cli) {
+      return cli.getOptionValue(shortCode);
+    }
+    public String get(CommandLine cli, String def) {
+      return has(cli)?cli.getOptionValue(shortCode):def;
+    }
+
+    public Map.Entry<ModelSubmissionOptions, String> of(String value) {
+      if(option.hasArg()) {
+        return new AbstractMap.SimpleEntry<>(this, value);
+      }
+      return new AbstractMap.SimpleEntry<>(this, null);
+    }
+
+    public static String toArgs(Map.Entry<ModelSubmissionOptions, String> ... arg) {
+      return
+      Joiner.on(" ").join(Iterables.transform(Arrays.asList(arg)
+                                             , a -> "-" + a.getKey().option.getOpt()
+                                                  + a.getValue() == null?"":(" " + a.getValue())
+                                             )
+                         );
+
+    }
+    public static CommandLine parse(CommandLineParser parser, String[] args) throws ParseException {
+      try {
+        CommandLine cli = parser.parse(getOptions(), args);
+        if(HELP.has(cli)) {
+          printHelp();
+          System.exit(0);
+        }
+        return cli;
+      } catch (ParseException e) {
+        System.err.println("Unable to parse args: " + Joiner.on(' ').join(args));
+        e.printStackTrace(System.err);
+        printHelp();
+        throw e;
+      }
+    }
+
+    public static void printHelp() {
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp( "ModelSubmission", getOptions());
+    }
+
+    public static Options getOptions() {
+      Options ret = new Options();
+      for(ModelSubmissionOptions o : ModelSubmissionOptions.values()) {
+        ret.addOption(o.option);
+      }
+      return ret;
+    }
+  }
+
+  public void execute(FileSystem fs, String... argv) throws Exception {
+    CommandLine cli = ModelSubmissionOptions.parse(new PosixParser(), argv);
+    ModelRequest request = null;
+    CuratorFramework client = null;
+    try {
+      RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
+      client = CuratorFrameworkFactory.newClient(ModelSubmissionOptions.ZK_QUORUM.get(cli), retryPolicy);
+      client.start();
+      MaaSConfig config = ConfigUtil.INSTANCE.read(client, ModelSubmissionOptions.ZK_ROOT.get(cli, "/metron/maas/config"), new MaaSConfig(), MaaSConfig.class);
+      String mode = ModelSubmissionOptions.MODE.get(cli);
+      if ( mode.equalsIgnoreCase("ADD")) {
+        request = new ModelRequest() {{
+          setName(ModelSubmissionOptions.NAME.get(cli));
+          setAction(Action.ADD);
+          setVersion(ModelSubmissionOptions.VERSION.get(cli));
+          setNumInstances(Integer.parseInt(ModelSubmissionOptions.NUM_INSTANCES.get(cli)));
+          setMemory(Integer.parseInt(ModelSubmissionOptions.MEMORY.get(cli)));
+          setPath(ModelSubmissionOptions.HDFS_MODEL_PATH.get(cli));
+        }};
+      } else if(mode.equalsIgnoreCase("REMOVE")) {
+        request = new ModelRequest() {{
+          setName(ModelSubmissionOptions.NAME.get(cli));
+          setAction(Action.REMOVE);
+          setNumInstances(Integer.parseInt(ModelSubmissionOptions.NUM_INSTANCES.get(cli)));
+          setVersion(ModelSubmissionOptions.VERSION.get(cli));
+        }};
+      }
+      else if(mode.equalsIgnoreCase("LIST")) {
+        String name = ModelSubmissionOptions.NAME.get(cli, null);
+        String version = ModelSubmissionOptions.VERSION.get(cli, null);
+        ServiceDiscoverer serviceDiscoverer = new ServiceDiscoverer(client, config.getServiceRoot());
+
+        Model model = new Model(name, version);
+        Map<Model, List<ModelEndpoint>> endpoints = serviceDiscoverer.listEndpoints(model);
+        for(Map.Entry<Model, List<ModelEndpoint>> kv : endpoints.entrySet()) {
+          String modelTitle = "Model " + kv.getKey().getName() + " @ " + kv.getKey().getVersion();
+          System.out.println(modelTitle);
+          for(ModelEndpoint endpoint : kv.getValue()){
+            System.out.println("\t" + endpoint.getContainerId() + " at " + endpoint.getUrl());
+          }
+        }
+      }
+
+
+      if (ModelSubmissionOptions.LOCAL_MODEL_PATH.has(cli)) {
+        File localDir = new File(ModelSubmissionOptions.LOCAL_MODEL_PATH.get(cli));
+        Path hdfsPath = new Path(ModelSubmissionOptions.HDFS_MODEL_PATH.get(cli));
+        updateHDFS(fs, localDir, hdfsPath);
+      }
+      Queue queue = config.createQueue(ImmutableMap.of(ZKQueue.ZK_CLIENT, client));
+      queue.enqueue(request);
+    } finally {
+      if (client != null) {
+        client.close();
+      }
+    }
+  }
+
+  public static void main(String... argv) throws Exception {
+    FileSystem fs = FileSystem.get(new Configuration());
+    ModelSubmission submission = new ModelSubmission();
+    submission.execute(fs, argv);
+  }
+
+  public static void updateHDFS(FileSystem fs, File localDir, Path hdfsPath) throws IOException {
+    if(localDir.exists() && localDir.isDirectory()) {
+      if(!fs.exists(hdfsPath)) {
+        fs.mkdirs(hdfsPath);
+      }
+      for(File f : localDir.listFiles()) {
+        Path p = new Path(hdfsPath, f.getName());
+        FSDataOutputStream out = fs.create(p);
+        BufferedInputStream in = new BufferedInputStream(new FileInputStream(f));
+        IOUtils.copy(in, out);
+        IOUtils.closeQuietly(in);
+        IOUtils.closeQuietly(out);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/util/Utils.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/util/Utils.java b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/util/Utils.java
new file mode 100644
index 0000000..eec3f67
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/main/java/org/apache/metron/maas/util/Utils.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.maas.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
/**
 * Small general-purpose helpers for the MaaS service, exposed through a
 * single-element enum singleton.
 */
public enum Utils {
  INSTANCE;

  /**
   * Copies the elements of an array into a freshly allocated, mutable List,
   * preserving order.
   *
   * @param arr the source array (must be non-null)
   * @return a new ArrayList containing the array's elements in order
   */
  public <T> List<T> toList(T[] arr) {
    List<T> result = new ArrayList<T>(arr.length);
    for (int i = 0; i < arr.length; ++i) {
      result.add(arr[i]);
    }
    return result;
  }
}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/main/scripts/maas_deploy.sh
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/main/scripts/maas_deploy.sh b/metron-analytics/metron-maas-service/src/main/scripts/maas_deploy.sh
new file mode 100755
index 0000000..4413af5
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/main/scripts/maas_deploy.sh
@@ -0,0 +1,43 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Submits a MaaS model request via the ModelSubmission CLI running on YARN.
# All arguments are passed through to org.apache.metron.maas.submit.ModelSubmission.

# Pull in Bigtop's HBase defaults if they are present and readable.
BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r "${BIGTOP_DEFAULTS_DIR}/hbase" ] && . "${BIGTOP_DEFAULTS_DIR}/hbase"

# Autodetect JAVA_HOME if not defined
if [ -e /usr/libexec/bigtop-detect-javahome ]; then
  . /usr/libexec/bigtop-detect-javahome
elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
  . /usr/lib/bigtop-utils/bigtop-detect-javahome
fi

export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
export METRON_VERSION=0.2.0BETA
export METRON_HOME=/usr/metron/$METRON_VERSION
export DM_JAR=metron-maas-service-$METRON_VERSION-uber.jar

# The uber jar is the whole classpath. (The original used a pointless
# word-splitting $(echo $CP) subshell; a plain quoted assignment suffices.)
CP="$METRON_HOME/lib/$DM_JAR"
HADOOP_CLASSPATH="$CP"
# Collect every jar on the classpath that actually exists into a
# comma-separated list. Operands are quoted so paths with spaces don't break
# the -f test.
# NOTE(review): LIBJARS is built here but never exported or passed to the
# yarn command below -- confirm whether it is still needed.
for jar in $(echo "$HADOOP_CLASSPATH" | sed 's/:/ /g'); do
  if [ -f "$jar" ]; then
    LIBJARS="$jar,$LIBJARS"
  fi
done
export HADOOP_CLASSPATH

yarn jar "$METRON_HOME/lib/$DM_JAR" org.apache.metron.maas.submit.ModelSubmission "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/main/scripts/maas_service.sh
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/main/scripts/maas_service.sh b/metron-analytics/metron-maas-service/src/main/scripts/maas_service.sh
new file mode 100755
index 0000000..fc3ccce
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/main/scripts/maas_service.sh
@@ -0,0 +1,43 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Launches the MaaS YARN service client. All arguments are passed through to
# org.apache.metron.maas.service.Client.

# Pull in Bigtop's HBase defaults if they are present and readable.
BIGTOP_DEFAULTS_DIR=${BIGTOP_DEFAULTS_DIR-/etc/default}
[ -n "${BIGTOP_DEFAULTS_DIR}" -a -r "${BIGTOP_DEFAULTS_DIR}/hbase" ] && . "${BIGTOP_DEFAULTS_DIR}/hbase"

# Autodetect JAVA_HOME if not defined
if [ -e /usr/libexec/bigtop-detect-javahome ]; then
  . /usr/libexec/bigtop-detect-javahome
elif [ -e /usr/lib/bigtop-utils/bigtop-detect-javahome ]; then
  . /usr/lib/bigtop-utils/bigtop-detect-javahome
fi

export HBASE_HOME=${HBASE_HOME:-/usr/hdp/current/hbase-client}
export METRON_VERSION=0.2.0BETA
export METRON_HOME=/usr/metron/$METRON_VERSION
export DM_JAR=metron-maas-service-$METRON_VERSION-uber.jar

# The uber jar is the whole classpath. (The original used a pointless
# word-splitting $(echo $CP) subshell; a plain quoted assignment suffices.)
CP="$METRON_HOME/lib/$DM_JAR"
HADOOP_CLASSPATH="$CP"
# Collect every jar on the classpath that actually exists into a
# comma-separated list. Operands are quoted so paths with spaces don't break
# the -f test.
# NOTE(review): LIBJARS is built here but never exported or passed to the
# yarn command below -- confirm whether it is still needed.
for jar in $(echo "$HADOOP_CLASSPATH" | sed 's/:/ /g'); do
  if [ -f "$jar" ]; then
    LIBJARS="$jar,$LIBJARS"
  fi
done
export HADOOP_CLASSPATH

yarn jar "$METRON_HOME/lib/$DM_JAR" org.apache.metron.maas.service.Client "$@"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/test/java/org/apache/metron/maas/service/MaasIntegrationTest.java
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/test/java/org/apache/metron/maas/service/MaasIntegrationTest.java b/metron-analytics/metron-maas-service/src/test/java/org/apache/metron/maas/service/MaasIntegrationTest.java
new file mode 100644
index 0000000..658cf4d
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/test/java/org/apache/metron/maas/service/MaasIntegrationTest.java
@@ -0,0 +1,512 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.metron.maas.service;
+import java.io.*;
+import java.net.HttpURLConnection;
+import java.net.InetAddress;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import com.google.common.base.Splitter;
+import com.google.common.collect.Iterables;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.curator.RetryPolicy;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.ExponentialBackoffRetry;
+import org.apache.curator.test.TestingServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.util.JarFinder;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.MiniYARNCluster;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.metron.maas.discovery.ServiceDiscoverer;
+import org.apache.metron.maas.config.MaaSConfig;
+import org.apache.metron.maas.config.Model;
+import org.apache.metron.maas.config.ModelEndpoint;
+import org.apache.metron.maas.queue.ZKQueue;
+import org.apache.metron.maas.submit.ModelSubmission;
+import org.apache.metron.maas.util.ConfigUtil;
+import org.apache.zookeeper.KeeperException;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class MaasIntegrationTest {
+  private static final Log LOG =
+          LogFactory.getLog(MaasIntegrationTest.class);
+
+  protected MiniYARNCluster yarnCluster = null;
+  protected YarnConfiguration conf = null;
+  private static final int NUM_NMS = 1;
+
+  protected final static String APPMASTER_JAR =
+          JarFinder.getJar(ApplicationMaster.class);
+  private TestingServer testZkServer;
+  private String zookeeperUrl;
+  private CuratorFramework client;
  /**
   * JUnit fixture setup: brings up the Zookeeper test server and the
   * MiniYARNCluster with the default number of node managers before each test.
   */
  @Before
  public void setup() throws Exception {
    setupInternal(NUM_NMS);
  }
+
  /**
   * Brings up the shared test fixture:
   * <ul>
   *   <li>an in-process Zookeeper server (Curator TestingServer) and a started
   *       Curator client pointed at it, and</li>
   *   <li>a MiniYARNCluster configured with the capacity scheduler, the
   *       timeline service, and node labels enabled.</li>
   * </ul>
   * The classpath copy of yarn-site.xml is overwritten with the mini cluster's
   * live configuration so client code resolves the right addresses, and the
   * timeline service's leveldb store is wiped so each test starts clean.
   *
   * @param numNodeManager number of node managers to start in the mini cluster
   */
  protected void setupInternal(int numNodeManager) throws Exception {

    LOG.info("Starting up YARN cluster");
    testZkServer = new TestingServer(true);
    zookeeperUrl = testZkServer.getConnectString();
    RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
    client = CuratorFrameworkFactory.newClient(zookeeperUrl, retryPolicy);
    client.start();
    conf = new YarnConfiguration();
    conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 128);
    conf.set("yarn.log.dir", "target");
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
    conf.set(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class.getName());
    conf.setBoolean(YarnConfiguration.NODE_LABELS_ENABLED, true);

    if (yarnCluster == null) {
      yarnCluster =
              new MiniYARNCluster(MaasIntegrationTest.class.getSimpleName(), 1,
                      numNodeManager, 1, 1, true);
      yarnCluster.init(conf);

      yarnCluster.start();

      // Block until every node manager has registered with the RM.
      waitForNMsToRegister();

      // Locate the dummy yarn-site.xml on the test classpath; it is replaced
      // below with the mini cluster's real configuration.
      URL url = Thread.currentThread().getContextClassLoader().getResource("yarn-site.xml");
      if (url == null) {
        throw new RuntimeException("Could not find 'yarn-site.xml' dummy file in classpath");
      }
      Configuration yarnClusterConfig = yarnCluster.getConfig();
      yarnClusterConfig.set("yarn.application.classpath", new File(url.getPath()).getParent());
      //write the document to a buffer (not directly to the file, as that
      //can cause the file being written to get read -which will then fail.
      ByteArrayOutputStream bytesOut = new ByteArrayOutputStream();
      yarnClusterConfig.writeXml(bytesOut);
      bytesOut.close();
      //write the bytes to the file in the classpath
      OutputStream os = new FileOutputStream(new File(url.getPath()));
      os.write(bytesOut.toByteArray());
      os.close();
    }
    // Wipe any leftover timeline store so each test starts from a clean slate.
    FileContext fsContext = FileContext.getLocalFSFileContext();
    fsContext
            .delete(
                    new Path(conf
                            .get("yarn.timeline-service.leveldb-timeline-store.path")),
                    true);
    try {
      // Give the cluster a moment to settle before the test body runs.
      Thread.sleep(2000);
    } catch (InterruptedException e) {
      LOG.info("setup thread sleep interrupted. message=" + e.getMessage());
    }
  }
+
  /**
   * JUnit fixture teardown: closes the Zookeeper test server, stops the
   * MiniYARNCluster (nulling the field even if stop throws), and deletes the
   * timeline service's leveldb store so state never leaks between tests.
   */
  @After
  public void tearDown() throws IOException {
    if(testZkServer != null) {
      testZkServer.close();
    }
    if (yarnCluster != null) {
      try {
        yarnCluster.stop();
      } finally {
        // Null the reference even on failure so the next setup starts fresh.
        yarnCluster = null;
      }
    }
    FileContext fsContext = FileContext.getLocalFSFileContext();
    fsContext
            .delete(
                    new Path(conf
                            .get("yarn.timeline-service.leveldb-timeline-store.path")),
                    true);
  }
+
  /** End-to-end run with a timeline domain configured (view/modify ACLs). */
  @Test(timeout=900000)
  public void testMaaSWithDomain() throws Exception {
    testDSShell(true);
  }
+
  /** End-to-end run without a timeline domain. */
  @Test(timeout=900000)
  public void testMaaSWithoutDomain() throws Exception {
    testDSShell(false);
  }
+
+  /**
+   * End-to-end MaaS smoke test: writes the MaaS config to ZooKeeper, launches
+   * the YARN application master via the Client, ADDs a dummy model and checks
+   * its REST endpoint answers, then REMOVEs it and checks the endpoint is gone.
+   *
+   * @param haveDomain when true, also passes the timeline-domain ACL arguments
+   *                   (--domain/--view_acls/--modify_acls/--create) to the client
+   */
+  public void testDSShell(boolean haveDomain) throws Exception {
+    // Build the MaaS configuration (service root + ZK-backed queue path).
+    MaaSConfig config = new MaaSConfig() {{
+      setServiceRoot("/maas/service");
+      setQueueConfig(new HashMap<String, Object>() {{
+        put(ZKQueue.ZK_PATH, "/maas/queue");
+      }});
+    }};
+    String configRoot = "/maas/config";
+    byte[] configData = ConfigUtil.INSTANCE.toBytes(config);
+    // Push the config into ZooKeeper; on the first run the znode does not
+    // exist yet, so fall back to creating it (and any missing parents).
+    try {
+      client.setData().forPath(configRoot, configData);
+    }
+    catch(KeeperException.NoNodeException e) {
+      client.create().creatingParentsIfNeeded().forPath(configRoot, configData);
+    }
+    String[] args = {
+            "--jar", APPMASTER_JAR,
+            "--zk_quorum", zookeeperUrl,
+            "--zk_root", configRoot,
+            "--master_memory", "512",
+            "--master_vcores", "2",
+    };
+    if (haveDomain) {
+      String[] domainArgs = {
+              "--domain",
+              "TEST_DOMAIN",
+              "--view_acls",
+              "reader_user reader_group",
+              "--modify_acls",
+              "writer_user writer_group",
+              "--create"
+      };
+      List<String> argsList = new ArrayList<String>(Arrays.asList(args));
+      argsList.addAll(Arrays.asList(domainArgs));
+      args = argsList.toArray(new String[argsList.size()]);
+    }
+
+    LOG.info("Initializing DS Client");
+    // NOTE: this local 'client' (the YARN Client) shadows the Curator field
+    // 'client' used above; the field is still reachable as this.client below.
+    final Client client = new Client(new Configuration(yarnCluster.getConfig()));
+    boolean initSuccess = client.init(args);
+    Assert.assertTrue(initSuccess);
+    LOG.info("Running DS Client");
+    // client.run() blocks until the application finishes, so run it on its
+    // own thread and poll YARN from this one.
+    final AtomicBoolean result = new AtomicBoolean(false);
+    Thread t = new Thread() {
+      public void run() {
+        try {
+          result.set(client.run());
+        } catch (Exception e) {
+          throw new RuntimeException(e);
+        }
+      }
+    };
+    t.start();
+
+    YarnClient yarnClient = YarnClient.createYarnClient();
+    yarnClient.init(new Configuration(yarnCluster.getConfig()));
+    yarnClient.start();
+    String hostName = NetUtils.getHostname();
+
+    // Poll until the app master reports a real host with rpc port -1, or the
+    // application reaches FINISHED first (in which case verified stays false).
+    boolean verified = false;
+    String errorMessage = "";
+    while(!verified) {
+      List<ApplicationReport> apps = yarnClient.getApplications();
+      if (apps.size() == 0 ) {
+        Thread.sleep(10);
+        continue;
+      }
+      ApplicationReport appReport = apps.get(0);
+      if(appReport.getHost().equals("N/A")) {
+        // Host not reported yet; keep waiting.
+        Thread.sleep(10);
+        continue;
+      }
+      errorMessage =
+              "Expected host name to start with '" + hostName + "', was '"
+                      + appReport.getHost() + "'. Expected rpc port to be '-1', was '"
+                      + appReport.getRpcPort() + "'.";
+      if (checkHostname(appReport.getHost()) && appReport.getRpcPort() == -1) {
+        verified = true;
+      }
+      if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED) {
+        break;
+      }
+    }
+    Assert.assertTrue(errorMessage, verified);
+    FileSystem fs = FileSystem.get(conf);
+    try {
+      // Deploy one instance of the dummy model through the normal submission
+      // path (uploads src/test/resources/maas to HDFS and enqueues an ADD).
+      new ModelSubmission().execute(FileSystem.get(conf)
+              , new String[]{
+                      "--name", "dummy",
+                      "--version", "1.0",
+                      "--zk_quorum", zookeeperUrl,
+                      "--zk_root", configRoot,
+                      "--local_model_path", "src/test/resources/maas",
+                      "--hdfs_model_path", new Path(fs.getHomeDirectory(), "maas/dummy").toString(),
+                      "--num_instances", "1",
+                      "--memory", "100",
+                      "--mode", "ADD",
+
+              });
+      // this.client is the Curator client field, not the YARN Client above.
+      ServiceDiscoverer discoverer = new ServiceDiscoverer(this.client, config.getServiceRoot());
+      discoverer.start();
+      {
+        // Retry for up to ~200s until exactly one endpoint is registered and
+        // its /echo REST call round-trips. Exceptions mean "not up yet".
+        boolean passed = false;
+        for (int i = 0; i < 100; ++i) {
+          try {
+            List<ModelEndpoint> endpoints = discoverer.getEndpoints(new Model("dummy", "1.0"));
+            if (endpoints != null && endpoints.size() == 1) {
+              String output = makeRESTcall(new URL(endpoints.get(0).getUrl() + "/echo/casey"));
+              if (output.contains("casey")) {
+                passed = true;
+                break;
+              }
+            }
+          } catch (Exception e) {
+          }
+          Thread.sleep(2000);
+        }
+        Assert.assertTrue(passed);
+      }
+
+      {
+        List<ModelEndpoint> endpoints = discoverer.getEndpoints(new Model("dummy", "1.0"));
+        Assert.assertNotNull(endpoints);
+        Assert.assertEquals(1, endpoints.size());
+      }
+      // Now remove the model instance and wait for the endpoint to vanish.
+      new ModelSubmission().execute(FileSystem.get(conf)
+              , new String[]{
+                      "--name", "dummy",
+                      "--version", "1.0",
+                      "--zk_quorum", zookeeperUrl,
+                      "--zk_root", configRoot,
+                      "--num_instances", "1",
+                      "--mode", "REMOVE",
+
+              });
+      {
+        boolean passed = false;
+        for (int i = 0; i < 100; ++i) {
+          try {
+            List<ModelEndpoint> endpoints = discoverer.getEndpoints(new Model("dummy", "1.0"));
+            //ensure that the endpoint is dead.
+            if (endpoints == null || endpoints.size() == 0) {
+              passed = true;
+              break;
+            }
+          } catch (Exception e) {
+          }
+          Thread.sleep(2000);
+        }
+        Assert.assertTrue(passed);
+      }
+    }
+    finally {
+      // Always kill any stray dummy_rest.sh processes, even on failure.
+      cleanup();
+    }
+  }
+
+  /**
+   * Best-effort teardown of stray model-endpoint processes.  dummy_rest.sh is
+   * launched inside YARN containers; if the application did not shut it down,
+   * find it via `ps -e` and kill -9 it so the port is free for the next run.
+   * All failures are logged and swallowed: cleanup must never fail the test.
+   */
+  private void cleanup() {
+    BufferedReader input = null;
+    try {
+      System.out.println("Cleaning up...");
+      Process p = Runtime.getRuntime().exec("ps -e");
+      input = new BufferedReader(new InputStreamReader(p.getInputStream()));
+      String line;
+      while ((line = input.readLine()) != null) {
+        if(line.contains("dummy_rest.sh")) {
+          // First whitespace-delimited column of `ps -e` output is the pid.
+          String pid = Iterables.get(Splitter.on(" ").split(line.replaceAll("\\s+", " ")), 0);
+          System.out.println("Killing " + pid + " from " + line);
+          Runtime.getRuntime().exec("kill -9 " + pid);
+        }
+      }
+    } catch (Exception err) {
+      err.printStackTrace();
+    } finally {
+      // Close in finally so the reader is not leaked if readLine() throws.
+      if (input != null) {
+        try {
+          input.close();
+        } catch (IOException ignored) {
+          // best-effort cleanup; nothing useful to do here
+        }
+      }
+    }
+  }
+  /**
+   * Issues an HTTP GET against the given URL and returns the response body
+   * with each line terminated by '\n'.
+   *
+   * @param url endpoint to call
+   * @return the response body
+   * @throws IOException on connection failure
+   * @throws RuntimeException if the server answers with a non-200 status
+   */
+  private String makeRESTcall(URL url) throws IOException {
+    HttpURLConnection conn = null;
+    //make connection
+    try{
+      conn = (HttpURLConnection) url.openConnection();
+      conn.setRequestMethod("GET");
+
+      if (conn.getResponseCode() != 200) {
+        throw new RuntimeException("Failed : HTTP error code : "
+                + conn.getResponseCode());
+      }
+
+      BufferedReader br = new BufferedReader(new InputStreamReader(
+              (conn.getInputStream())));
+      try {
+        // StringBuilder instead of repeated String concatenation avoids
+        // O(n^2) copying on multi-line responses.
+        StringBuilder output = new StringBuilder();
+        String line;
+        while ((line = br.readLine()) != null) {
+          output.append(line).append("\n");
+        }
+        return output.toString();
+      } finally {
+        // Close the reader explicitly; disconnect() alone does not do it.
+        br.close();
+      }
+    }
+    finally {
+      if(conn != null) {
+        conn.disconnect();
+      }
+    }
+  }
+
+  /*
+   * NetUtils.getHostname() returns a string of the form "hostname/ip".  The
+   * hostname may come back as either the FQDN or the short name, and on
+   * multi-homed machines any one of the host's IPs may be reported.  Compare
+   * the application's reported host against the local host allowing for all
+   * of those variations.
+   */
+  private boolean checkHostname(String appHostname) throws Exception {
+    String hostname = NetUtils.getHostname();
+    if (hostname.equals(appHostname)) {
+      return true;
+    }
+
+    Assert.assertTrue("Unknown format for hostname " + appHostname,
+            appHostname.contains("/"));
+    Assert.assertTrue("Unknown format for hostname " + hostname,
+            hostname.contains("/"));
+
+    String[] appParts = appHostname.split("/");
+    String[] localParts = hostname.split("/");
+
+    // Names must match (possibly after canonicalization) before IPs are checked.
+    if (!compareFQDNs(appParts[0], localParts[0])) {
+      return false;
+    }
+    return checkIPs(localParts[0], localParts[1], appParts[1]);
+  }
+
+  // Two host names match when they are textually equal or resolve to the same
+  // canonical (fully-qualified) name.
+  private boolean compareFQDNs(String appHostname, String hostname)
+          throws Exception {
+    if (appHostname.equals(hostname)) {
+      return true;
+    }
+    return InetAddress.getByName(appHostname).getCanonicalHostName()
+            .equals(InetAddress.getByName(hostname).getCanonicalHostName());
+  }
+
+  // The reported IP matches when it equals the local IP outright, or when both
+  // IPs appear among the addresses registered for the host name.
+  private boolean checkIPs(String hostname, String localIP, String appIP)
+          throws Exception {
+    if (localIP.equals(appIP)) {
+      return true;
+    }
+    boolean sawAppIP = false;
+    boolean sawLocalIP = false;
+    for (InetAddress address : InetAddress.getAllByName(hostname)) {
+      String candidate = address.getHostAddress();
+      if (candidate.equals(appIP)) {
+        sawAppIP = true;
+      } else if (candidate.equals(localIP)) {
+        sawLocalIP = true;
+      }
+    }
+    return sawAppIP && sawLocalIP;
+  }
+
+
+
+  /**
+   * Blocks until at least NUM_NMS node managers have registered with the
+   * resource manager, polling once a second for roughly a minute.
+   */
+  protected void waitForNMsToRegister() throws Exception {
+    for (int remaining = 60; remaining >= 0; remaining--) {
+      int registered =
+              yarnCluster.getResourceManager().getRMContext().getRMNodes().size();
+      if (registered >= NUM_NMS) {
+        return;
+      }
+      Thread.sleep(1000);
+    }
+  }
+  /**
+   * Scans the node manager's log directory for the application log folder
+   * that contains exactly containerNum + 1 container directories (the +1 is
+   * presumably the application master's container — TODO confirm), then
+   * inspects each container's stdout file.
+   *
+   * @param containerNum    expected number of task containers
+   * @param expectedContent lines expected in stdout when count is false
+   * @param count           when true, count occurrences of expectedWord
+   *                        instead of comparing content line-by-line
+   * @param expectedWord    word to count when count is true
+   * @return number of stdout lines containing expectedWord (0 when count is false)
+   */
+  private int verifyContainerLog(int containerNum,
+                                 List<String> expectedContent, boolean count, String expectedWord) {
+    File logFolder =
+            new File(yarnCluster.getNodeManager(0).getConfig()
+                    .get(YarnConfiguration.NM_LOG_DIRS,
+                            YarnConfiguration.DEFAULT_NM_LOG_DIRS));
+
+    // Walk the app log dirs newest-first, looking for the one with the
+    // expected container count.
+    // NOTE(review): assumes every entry in logFolder is a directory;
+    // listFiles() returns null for non-directories — confirm.
+    File[] listOfFiles = logFolder.listFiles();
+    int currentContainerLogFileIndex = -1;
+    for (int i = listOfFiles.length - 1; i >= 0; i--) {
+      if (listOfFiles[i].listFiles().length == containerNum + 1) {
+        currentContainerLogFileIndex = i;
+        break;
+      }
+    }
+    Assert.assertTrue(currentContainerLogFileIndex != -1);
+    File[] containerFiles =
+            listOfFiles[currentContainerLogFileIndex].listFiles();
+
+    int numOfWords = 0;
+    for (int i = 0; i < containerFiles.length; i++) {
+      for (File output : containerFiles[i].listFiles()) {
+        // Only stdout logs are inspected (matches e.g. "stdout", "stdout.txt").
+        if (output.getName().trim().contains("stdout")) {
+          BufferedReader br = null;
+          List<String> stdOutContent = new ArrayList<String>();
+          try {
+
+            String sCurrentLine;
+            br = new BufferedReader(new FileReader(output));
+            int numOfline = 0;
+            while ((sCurrentLine = br.readLine()) != null) {
+              if (count) {
+                // Counting mode: tally lines containing the expected word.
+                if (sCurrentLine.contains(expectedWord)) {
+                  numOfWords++;
+                }
+              } else if (output.getName().trim().equals("stdout")){
+                if (! Shell.WINDOWS) {
+                  // Non-Windows: compare output to expectedContent line by line.
+                  Assert.assertEquals("The current is" + sCurrentLine,
+                          expectedContent.get(numOfline), sCurrentLine.trim());
+                  numOfline++;
+                } else {
+                  // Windows: collect lines for the containment check below.
+                  stdOutContent.add(sCurrentLine.trim());
+                }
+              }
+            }
+            /* By executing bat script using cmd /c,
+             * it will output all contents from bat script first
+             * It is hard for us to do check line by line
+             * Simply check whether output from bat file contains
+             * all the expected messages
+             */
+            if (Shell.WINDOWS && !count
+                    && output.getName().trim().equals("stdout")) {
+              Assert.assertTrue(stdOutContent.containsAll(expectedContent));
+            }
+          } catch (IOException e) {
+            e.printStackTrace();
+          } finally {
+            try {
+              if (br != null)
+                br.close();
+            } catch (IOException ex) {
+              ex.printStackTrace();
+            }
+          }
+        }
+      }
+    }
+    return numOfWords;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/test/resources/log4j.properties b/metron-analytics/metron-maas-service/src/test/resources/log4j.properties
new file mode 100644
index 0000000..74a9b22
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/test/resources/log4j.properties
@@ -0,0 +1,19 @@
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+
+# log4j configuration used during build and unit tests
+
+log4j.rootLogger=error,stdout
+log4j.threshhold=ALL
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2} (%F:%M(%L)) - %m%n
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/test/resources/maas/dummy_rest.sh
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/test/resources/maas/dummy_rest.sh b/metron-analytics/metron-maas-service/src/test/resources/maas/dummy_rest.sh
new file mode 100644
index 0000000..2b739a5
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/test/resources/maas/dummy_rest.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Minimal HTTP "model endpoint" used by the MaaS integration test.  Listens on
+# port 1500 via netcat and answers GET /echo/<word> by echoing <word> back;
+# any other path returns a 404.  The service URL is written to endpoint.dat
+# so the MaaS runner can register the endpoint for discovery.
+
+# "out" is a named pipe: the response built by the subshell below is fed back
+# into netcat through it. Remove it on exit.
+rm -f out
+mkfifo out
+trap "rm -f out" EXIT
+echo "http://localhost:1500" > endpoint.dat
+while true
+do
+  cat out | nc -l 0.0.0.0 1500 > >( # parse the netcat output, to build the answer redirected to the pipe "out".
+    export REQUEST=
+    while read line
+    do
+      # Strip CR/LF so header comparisons work regardless of line endings.
+      line=$(echo "$line" | tr -d '[\r\n]')
+
+      if echo "$line" | grep -qE '^GET /' # if line starts with "GET /"
+      then
+        REQUEST=$(echo "$line" | cut -d ' ' -f2) # extract the request
+      elif [ "x$line" = x ] # empty line / end of request
+      then
+        HTTP_200="HTTP/1.1 200 OK"
+        HTTP_LOCATION="Location:"
+        HTTP_404="HTTP/1.1 404 Not Found"
+        # call a script here
+        # Note: REQUEST is exported, so the script can parse it (to answer 200/403/404 status code + content)
+        if echo $REQUEST | grep -qE '^/echo/'
+        then
+            printf "%s\n%s %s\n\n%s\n" "$HTTP_200" "$HTTP_LOCATION" $REQUEST ${REQUEST#"/echo/"} > out
+        else
+            printf "%s\n%s %s\n\n%s\n" "$HTTP_404" "$HTTP_LOCATION" $REQUEST "Resource $REQUEST NOT FOUND!" > out
+        fi
+      fi
+    done
+  )
+done
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/metron-maas-service/src/test/resources/yarn-site.xml
----------------------------------------------------------------------
diff --git a/metron-analytics/metron-maas-service/src/test/resources/yarn-site.xml b/metron-analytics/metron-maas-service/src/test/resources/yarn-site.xml
new file mode 100644
index 0000000..1300501
--- /dev/null
+++ b/metron-analytics/metron-maas-service/src/test/resources/yarn-site.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+    http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+<configuration>
+</configuration>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-analytics/pom.xml
----------------------------------------------------------------------
diff --git a/metron-analytics/pom.xml b/metron-analytics/pom.xml
new file mode 100644
index 0000000..8930f5f
--- /dev/null
+++ b/metron-analytics/pom.xml
@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software
+	Foundation (ASF) under one or more contributor license agreements. See the
+	NOTICE file distributed with this work for additional information regarding
+	copyright ownership. The ASF licenses this file to You under the Apache License,
+	Version 2.0 (the "License"); you may not use this file except in compliance
+	with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+	Unless required by applicable law or agreed to in writing, software distributed
+	under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
+	OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+  the specific language governing permissions and limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+		 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>org.apache.metron</groupId>
+	<artifactId>metron-analytics</artifactId>
+	<packaging>pom</packaging>
+	<name>metron-analytics</name>
+	<parent>
+		<groupId>org.apache.metron</groupId>
+		<artifactId>Metron</artifactId>
+		<version>0.2.0BETA</version>
+	</parent>
+	<description>Stream analytics for Metron</description>
+	<url>https://metron.incubator.apache.org/</url>
+	<scm>
+		<connection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</connection>
+		<developerConnection>scm:git:https://git-wip-us.apache.org/repos/asf/incubator-metron.git</developerConnection>
+		<tag>HEAD</tag>
+		<url>https://git-wip-us.apache.org/repos/asf/incubator-metron</url>
+	</scm>
+
+	<licenses>
+		<license>
+			<name>The Apache Software License, Version 2.0</name>
+			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+			<distribution>repo</distribution>
+		</license>
+	</licenses>
+	<modules>
+		<module>metron-maas-service</module>
+		<module>metron-maas-common</module>
+	</modules>
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.12</version>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.adrianwalker</groupId>
+			<artifactId>multiline-string</artifactId>
+			<version>0.1.2</version>
+			<scope>test</scope>
+		</dependency>
+	</dependencies>
+	<build>
+		<plugins>
+
+		</plugins>
+	</build>
+	<reporting>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-surefire-plugin</artifactId>
+				<version>2.18</version>
+				<configuration>
+					<argLine>-Xmx2048m -XX:MaxPermSize=256m</argLine>
+					<systemProperties>
+						<property>
+							<name>mode</name>
+							<value>local</value>
+						</property>
+					</systemProperties>
+				</configuration>
+			</plugin>
+			<!-- Normally, dependency report takes time, skip it -->
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-project-info-reports-plugin</artifactId>
+				<version>2.7</version>
+				<configuration>
+					<dependencyLocationsEnabled>false</dependencyLocationsEnabled>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-pmd-plugin</artifactId>
+				<version>3.3</version>
+				<configuration>
+          <targetJdk>${global_java_version}</targetJdk>
+				</configuration>
+			</plugin>
+			<plugin>
+				<groupId>org.codehaus.mojo</groupId>
+				<artifactId>emma-maven-plugin</artifactId>
+				<version>1.0-alpha-3</version>
+				<inherited>true</inherited>
+			</plugin>
+		</plugins>
+	</reporting>
+	<repositories>
+		<repository>
+			<id>clojars.org</id>
+			<url>http://clojars.org/repo</url>
+		</repository>
+		<repository>
+			<id>multiline-release-repo</id>
+			<url>https://raw.github.com/benelog/multiline/master/maven-repository</url>
+			<snapshots>
+				<enabled>false</enabled>
+			</snapshots>
+		</repository>
+    </repositories>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-deployment/roles/metron_streaming/defaults/main.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/defaults/main.yml b/metron-deployment/roles/metron_streaming/defaults/main.yml
index 8618c38..593c0a8 100644
--- a/metron-deployment/roles/metron_streaming/defaults/main.yml
+++ b/metron-deployment/roles/metron_streaming/defaults/main.yml
@@ -30,6 +30,7 @@ metron_indexing_bundle_name: metron-indexing-{{ metron_version }}-archive.tar.gz
 metron_solr_bundle_name: metron-solr-{{ metron_version }}-archive.tar.gz
 metron_elasticsearch_bundle_name: metron-elasticsearch-{{ metron_version }}-archive.tar.gz
 metron_parsers_bundle_name: metron-parsers-{{ metron_version }}-archive.tar.gz
+metron_maas_bundle_name: metron-maas-service-{{ metron_version }}-archive.tar.gz
 metron_pcap_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-pcap-backend/target/{{ metron_pcap_bundle_name }}"
 metron_common_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-common/target/{{ metron_common_bundle_name }}"
 metron_data_management_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-data-management/target/{{ metron_data_management_bundle_name }}"
@@ -38,6 +39,7 @@ metron_indexing_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-in
 metron_solr_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-solr/target/{{ metron_solr_bundle_name }}"
 metron_elasticsearch_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-elasticsearch/target/{{ metron_elasticsearch_bundle_name }}"
 metron_parsers_bundle_path: "{{ playbook_dir }}/../../metron-platform/metron-parsers/target/{{ metron_parsers_bundle_name }}"
+metron_maas_bundle_path: "{{ playbook_dir }}/../../metron-analytics/metron-maas-service/target/{{ metron_maas_bundle_name }}"
 
 config_path: "{{ metron_directory }}/config"
 zookeeper_config_path: "{{ config_path }}/zookeeper"

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/metron-deployment/roles/metron_streaming/tasks/copy_bundles.yml
----------------------------------------------------------------------
diff --git a/metron-deployment/roles/metron_streaming/tasks/copy_bundles.yml b/metron-deployment/roles/metron_streaming/tasks/copy_bundles.yml
index e295139..12dd411 100644
--- a/metron-deployment/roles/metron_streaming/tasks/copy_bundles.yml
+++ b/metron-deployment/roles/metron_streaming/tasks/copy_bundles.yml
@@ -56,6 +56,11 @@
     src: "{{ metron_pcap_bundle_path }}"
     dest: "{{ metron_directory }}"
 
+- name: Copy Metron MaaS bundle
+  copy:
+    src: "{{ metron_maas_bundle_path }}"
+    dest: "{{ metron_directory }}"
+
 - name: Unbundle Metron bundles
   shell: "{{ item }}"
   args:
@@ -69,4 +74,5 @@
     - tar xzvf metron-data-management*.tar.gz
     - tar xzvf metron-common*.tar.gz
     - tar xzvf metron-pcap-backend*.tar.gz
+    - tar xzvf metron-maas-service*.tar.gz
     - rm *.tar.gz

http://git-wip-us.apache.org/repos/asf/incubator-metron/blob/e5d75e70/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 9817211..c138d2b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -25,6 +25,7 @@
     <description>Metron Top Level Project</description>
     <url>https://metron.incubator.apache.org/</url>
     <modules>
+            <module>metron-analytics</module>
             <module>metron-platform</module>
     </modules>
     <properties>


Mime
View raw message