eagle-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From yonzhang2...@apache.org
Subject [1/2] incubator-eagle git commit: EAGLE-468 clean up eagle-application-service clean up eagle-application-service
Date Mon, 15 Aug 2016 19:46:41 GMT
Repository: incubator-eagle
Updated Branches:
  refs/heads/develop 18ae3bbbb -> 46afec395


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/safemodecheck-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/safemodecheck-policy-import.sh b/eagle-hadoop-metric/src/main/resources/safemodecheck-policy-import.sh
deleted file mode 100644
index 32a6bee..0000000
--- a/eagle-hadoop-metric/src/main/resources/safemodecheck-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: safeModePolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "safeModePolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[component==\\\"namenode\\\" and metric == \\\"hadoop.namenode.fsnamesystemstate.fsstate\\\" and convert(value, \\\"long\\\") > 0]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/capacityused-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/capacityused-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/capacityused-policy-import.sh
deleted file mode 100755
index ad52275..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/capacityused-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: capacityUsedPolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "capacityUsedPolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.fsnamesystemstate.capacityused\\\" and convert(value, \\\"long\\\") > 0]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/hastate-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/hastate-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/hastate-policy-import.sh
deleted file mode 100755
index 7801c09..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/hastate-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: haStatePolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "haStatePolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from every a = hadoopJmxMetricEventStream[metric==\\\"hadoop.namenode.fsnamesystem.hastate\\\"] -> b = hadoopJmxMetricEventStream[metric==a.metric and b.host == a.host and (convert(a.value, \\\"long\\\") != convert(value, \\\"long\\\"))] within 10 min select a.host, a.value as oldHaState, b.value as newHaState, b.timestamp as timestamp, b.metric as metric, b.component as component, b.site as site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/lastcheckpointtime-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/lastcheckpointtime-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/lastcheckpointtime-policy-import.sh
deleted file mode 100755
index d3811aa..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/lastcheckpointtime-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: lastCheckPointTimePolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "lastCheckPointTimePolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.dfs.lastcheckpointtime\\\" and (convert(value, \\\"long\\\") + 18000000) < timestamp]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"last check point time lag found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/missingblock-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/missingblock-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/missingblock-policy-import.sh
deleted file mode 100755
index be51597..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/missingblock-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: missingBlockPolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "missingBlockPolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.dfs.missingblocks\\\" and convert(value, \\\"long\\\") > 0]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/namenodehanoactive-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/namenodehanoactive-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/namenodehanoactive-policy-import.sh
deleted file mode 100755
index 33a7210..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/namenodehanoactive-policy-import.sh
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-
-echo ""
-echo "Importing policy: NamenodeHAHasNoActive "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "NamenodeHAHasNoActive",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.hastate.active.count\\\" and value == 0 ]select * insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/namenodehawithmorethanoneactive-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/namenodehawithmorethanoneactive-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/namenodehawithmorethanoneactive-policy-import.sh
deleted file mode 100755
index 2afa09e..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/namenodehawithmorethanoneactive-policy-import.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-
-echo ""
-echo "Importing policy: NamenodeHAWithMoreThanOneActive "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "NamenodeHAWithMoreThanOneActive",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.hastate.active.count\\\" and value > 1]select * insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0
-

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/namenodelag-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/namenodelag-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/namenodelag-policy-import.sh
deleted file mode 100755
index 2ccd37e..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/namenodelag-policy-import.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-echo ""
-echo "Importing Policy: NameNodeLagPolicy"
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "NameNodeLagPolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from every a = hadoopJmxMetricEventStream[metric==\\\"hadoop.namenode.journaltransaction.lastappliedorwrittentxid\\\"] -> b = hadoopJmxMetricEventStream[metric==a.metric and b.host != a.host and (max(convert(a.value, \\\"long\\\")) + 100) <= max(convert(value, \\\"long\\\"))] within 5 min select a.host as hostA, a.value as transactIdA, b.host as hostB, b.value as transactIdB insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"name node lag found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/nodecount-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/nodecount-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/nodecount-policy-import.sh
deleted file mode 100755
index 90e0114..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/nodecount-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: dataNodeCountPolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "dataNodeCountPolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from every (e1 = hadoopJmxMetricEventStream[metric == \\\"hadoop.namenode.fsnamesystemstate.numlivedatanodes\\\" ]) -> e2 = hadoopJmxMetricEventStream[metric == e1.metric and host == e1.host and (convert(e1.value, \\\"long\\\") - 5) >= convert(value, \\\"long\\\") ] within 5 min select e1.metric, e1.host, e1.value as highNum, e1.timestamp as start, e2.value as lowNum, e2.timestamp as end insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"node count joggling found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhanoactive-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhanoactive-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhanoactive-policy-import.sh
deleted file mode 100755
index 95b9c35..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhanoactive-policy-import.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-
-echo ""
-echo "Importing policy: ResourceManagerHAHasNoActive "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "ResourceManagerHAHasNoActive",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.resourcemanager.hastate.active.count\\\" and value == 0 ]select * insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhawithmorethanoneactive-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhawithmorethanoneactive-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhawithmorethanoneactive-policy-import.sh
deleted file mode 100755
index 269003c..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/resourcemanagerhawithmorethanoneactive-policy-import.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-
-echo ""
-echo "Importing policy: ResourceManagerHAHasMoreThanOneActive "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "ResourceManagerHAHasMoreThanOneActive",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine",
-         "description":" Resource Manager HA Has More than one Active"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[metric == \\\"hadoop.resourcemanager.hastate.active.count\\\" and value > 1 ]select * insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/sanbox/safemodecheck-policy-import.sh
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/sanbox/safemodecheck-policy-import.sh b/eagle-hadoop-metric/src/main/resources/sanbox/safemodecheck-policy-import.sh
deleted file mode 100755
index 8f1d14f..0000000
--- a/eagle-hadoop-metric/src/main/resources/sanbox/safemodecheck-policy-import.sh
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with`
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-source $(dirname $0)/eagle-env.sh
-source $(dirname $0)/hadoop-metric-init.sh
-
-
-##### add policies ##########
-echo ""
-echo "Importing policy: safeModePolicy "
-curl -u ${EAGLE_SERVICE_USER}:${EAGLE_SERVICE_PASSWD} -X POST -H 'Content-Type:application/json' \
- "http://${EAGLE_SERVICE_HOST}:${EAGLE_SERVICE_PORT}/eagle-service/rest/entities?serviceName=AlertDefinitionService" \
- -d '
- [
-     {
-       "prefix": "alertdef",
-       "tags": {
-         "site": "sandbox",
-         "application": "hadoopJmxMetricDataSource",
-         "policyId": "safeModePolicy",
-         "alertExecutorId": "hadoopJmxMetricAlertExecutor",
-         "policyType": "siddhiCEPEngine"
-       },
-       "description": "jmx metric ",
-       "policyDef": "{\"expression\":\"from hadoopJmxMetricEventStream[component==\\\"namenode\\\" and metric == \\\"hadoop.namenode.fsnamesystemstate.fsstate\\\" and convert(value, \\\"long\\\") > 0]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp; \",\"type\":\"siddhiCEPEngine\"}",
-       "enabled": true,
-       "dedupeDef": "{\"alertDedupIntervalMin\":10,\"emailDedupIntervalMin\":10}",
-       "notificationDef": "[{\"notificationType\":\"eagleStore\"},{\"sender\":\"eagle@apache.org\",\"recipients\":\"eagle@apache.org\",\"subject\":\"missing block found.\",\"flavor\":\"email\",\"id\":\"email_1\",\"tplFileName\":\"\"}]"
-     }
- ]
- '
-
- ## Finished
-echo ""
-echo "Finished initialization for eagle topology"
-
-exit 0

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/main/resources/streamdefinitions.json
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/main/resources/streamdefinitions.json b/eagle-hadoop-metric/src/main/resources/streamdefinitions.json
new file mode 100644
index 0000000..66a2c98
--- /dev/null
+++ b/eagle-hadoop-metric/src/main/resources/streamdefinitions.json
@@ -0,0 +1,47 @@
+[
+  {
+    "streamId": "hadoopJmxMetricEventStream",
+    "dataSource": "hadoop_jmx_datasource",
+    "description": "the data stream for hadoop jmx metrics",
+    "validate": false,
+    "timeseries": false,
+    "columns": [
+      {
+        "name": "host",
+        "type": "STRING",
+        "defaultValue": "",
+        "required": true
+      },
+      {
+        "name": "timestamp",
+        "type": "LONG",
+        "defaultValue": 0,
+        "required": true
+      },
+      {
+        "name": "metric",
+        "type": "STRING",
+        "defaultValue": "default_hadoop_jmx_metric_name",
+        "required": true
+      },
+      {
+        "name": "component",
+        "type": "STRING",
+        "defaultValue": "namenode",
+        "required": true
+      },
+      {
+        "name": "site",
+        "type": "STRING",
+        "defaultValue": "hadoop",
+        "required": true
+      },
+      {
+        "name": "value",
+        "type": "DOUBLE",
+        "defaultValue": 0.0,
+        "required": true
+      }
+    ]
+  }
+]
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
deleted file mode 100644
index 4c7fe6d..0000000
--- a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/HadoopJmxMetricDeserializerTest.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.hadoop.metric;
-
-import org.junit.Test;
-
-/**
- * Created on 1/19/16.
- */
-public class HadoopJmxMetricDeserializerTest {
-    @Test
-    public void test() {
-//        HadoopJmxMetricDeserializer des = new HadoopJmxMetricDeserializer(null);
-//
-//        String m = "{\"host\": \"hostname-1\", \"timestamp\": 1453208956395, \"metric\": \"hadoop.namenode.dfs.lastwrittentransactionid\", \"component\": \"namenode\", \"site\": \"sandbox\", \"value\": \"49716\"}";
-//        Object obj = des.deserialize(m.getBytes());
-//        Assert.assertTrue(obj instanceof Map);
-//        Map<String, Object> metric = (Map<String, Object>) obj;
-//        Assert.assertEquals("hostname-1" ,metric.get("host"));
-//        Assert.assertEquals(1453208956395l ,metric.get("timestamp"));
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java b/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
deleted file mode 100644
index c8096d6..0000000
--- a/eagle-hadoop-metric/src/test/java/org/apache/eagle/hadoop/metric/TestHadoopMetricSiddhiQL.java
+++ /dev/null
@@ -1,354 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.eagle.hadoop.metric;
-
-import org.junit.Assert;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.wso2.siddhi.core.ExecutionPlanRuntime;
-import org.wso2.siddhi.core.SiddhiManager;
-import org.wso2.siddhi.core.event.Event;
-import org.wso2.siddhi.core.query.output.callback.QueryCallback;
-import org.wso2.siddhi.core.stream.input.InputHandler;
-import org.wso2.siddhi.core.stream.output.StreamCallback;
-import org.wso2.siddhi.query.api.expression.constant.DoubleConstant;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-/**
- * Created on 1/17/16.
- */
-public class TestHadoopMetricSiddhiQL {
-
-    @Ignore
-    @Test
-    public void testNameNodeLag() throws Exception {
-        String ql = "define stream s (host string, timestamp long, metric string, component string, site string, value string);" +
-                " @info(name='query') " +
-                " from s[metric=='hadoop.namenode.dfs.lastwrittentransactionid' and host=='a' ]#window.externalTime(timestamp, 5 min) select * insert into tmp1;" +
-                " from s[metric=='hadoop.namenode.dfs.lastwrittentransactionid' and host=='b' ]#window.externalTime(timestamp, 5 min) select * insert into tmp2;" +
-                " from tmp1 , tmp2 select tmp1.timestamp as t1time, tmp2.timestamp as t2time, max(convert(tmp1.value, 'long')) - max(convert(tmp2.value, 'long')) as gap insert into tmp3;" +
-                " from tmp3[gap > 100] select * insert into tmp;"
-                ;
-
-        System.out.println("test name node log with multiple stream defined!");
-        testQL(ql, generateNameNodeLagEvents(), -1, true);
-    }
-
-    @Ignore
-    @Test
-    public void testNameNodeLag2_patternMatching() throws Exception {
-        String ql =
-            " define stream s (host string, timestamp long, metric string, component string, site string, value string); " +
-            " @info(name='query') " +
-            " from every a = s[metric=='hadoop.namenode.dfs.lastwrittentransactionid'] " +
-            "         -> b = s[metric=='hadoop.namenode.dfs.lastwrittentransactionid' and b.host != a.host " +
-                    " and (convert(a.value, 'long') + 100) < convert(value, 'long') ] " +
-            " within 5 min select a.host as hostA, b.host as hostB insert into tmp; ";
-
-        testQL(ql, generateNameNodeLagEvents(), -1);
-    }
-
-    private void testQL(String ql, List<Event> events, int i) throws Exception {
-        testQL(ql, events, i, false);
-    }
-
-    private void testQL(String ql, List<Event> events, int eventHappenCount, boolean useStreamCallback) throws InterruptedException {
-        SiddhiManager sm = new SiddhiManager();
-        ExecutionPlanRuntime runtime = sm.createExecutionPlanRuntime(ql);
-
-        InputHandler input = runtime.getInputHandler("s");
-
-        final AtomicInteger count = new AtomicInteger(0);
-        final CountDownLatch latch = new CountDownLatch(1);
-        // use stream call back or query callback
-        if (useStreamCallback) {
-            runtime.addCallback("tmp", new StreamCallback() {
-                AtomicInteger round = new AtomicInteger();
-
-                @Override
-                public void receive(Event[] events) {
-                    count.incrementAndGet();
-                    round.incrementAndGet();
-                    System.out.println("event round: " + round.get() + " event count : " + events.length);
-                    printEvents(events);
-                    latch.countDown();
-                }
-            });
-        } else {
-            runtime.addCallback("query", new QueryCallback() {
-                AtomicInteger round = new AtomicInteger();
-
-                @Override
-                public void receive(long timeStamp, Event[] inEvents, Event[] removeEvents) {
-                    count.incrementAndGet();
-                    round.incrementAndGet();
-                    System.out.println("event round: " + round.get() + " event count : " + inEvents.length);
-                    printEvents(inEvents);
-                    latch.countDown();
-                }
-            });
-        }
-
-        runtime.start();
-
-        for (Event e : events) {
-            input.send(e);
-        }
-
-        latch.await(10, TimeUnit.SECONDS);
-        Thread.sleep(3000);
-
-        System.out.println("callback count=" + count.get());
-        if (eventHappenCount >= 0) {
-            Assert.assertEquals(eventHappenCount, count.get());
-        } else {
-            Assert.assertTrue(count.get() > 0);
-        }
-
-        runtime.shutdown();
-        sm.shutdown();
-    }
-
-    private void printEvents(Event[] inEvents) {
-        for (Event e : inEvents) {
-            for(Object o : e.getData()) {
-                System.out.print(o);
-                System.out.print('\t');
-            }
-            System.out.println();
-        }
-    }
-
-    private List<Event> generateNameNodeLagEvents() {
-        List<Event> events = new LinkedList<>();
-
-        long base1 = System.currentTimeMillis();
-        long tbase1 = 1000;
-        long tbase2 = 1000;
-
-        int SIZE = 10;
-        // master / slave in sync, no events for these
-        for (int i = 0;i < SIZE; i++) {
-            base1 += 1000;
-            tbase1 += 100;
-            Event e = new Event();
-            e.setData(new Object[] {"a", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase1)});
-            events.add(e);
-
-            tbase2 += 100;
-            e = new Event();
-            e.setData(new Object[] {"b", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase2)});
-            events.add(e);
-        }
-
-
-        {
-            // make sure flush previous windows
-
-            base1 += 6 * 60 * 1000;
-            tbase1 = 3000;
-            Event e = new Event();
-            e.setData(new Object[]{"a", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase1)});
-            events.add(e);
-
-            tbase2 = tbase1 + 110; // > 100, trigger an event
-            e = new Event();
-            e.setData(new Object[]{"b", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase2)});
-            events.add(e);
-
-            // trigger event
-//            base1 = base1 + 6 * 60 * 1000;
-//            e = new Event();
-//            e.setData(new Object[]{"b", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase2)});
-//            events.add(e);
-        }
-
-        return events;
-    }
-
-    /**
-    E.g. Alert if temperature of a room increases by 5 degrees within 10 min.
-            from every( e1=TempStream ) -> e2=TempStream[e1.roomNo==roomNo and (e1.temp + 5) <= temp ]
-                within 10 min
-            select e1.roomNo, e1.temp as initialTemp, e2.temp as finalTemp
-            insert into AlertStream;
-     */
-    @Ignore
-    @Test
-    public void testCase4_LiveDataNodeJoggle() throws Exception {
-
-        String ql = "define stream s (host string, timestamp long, metric string, component string, site string, value string);" +
-                " @info(name='query') " +
-                " from every (e1 = s[metric == \"hadoop.namenode.fsnamesystemstate.numlivedatanodes\" ]) -> " +
-                "             e2 = s[metric == e1.metric and host == e1.host and (convert(e1.value, \"long\") + 5) <= convert(value, \"long\") ]" +
-                " within 5 min " +
-                " select e1.metric, e1.host, e1.value as lowNum, e1.timestamp as start, e2.value as highNum, e2.timestamp as end " +
-                " insert into tmp;"
-                ;
-
-        testQL(ql, generateDataNodeJoggleEvents(), -1);
-    }
-
-    private List<Event> generateDataNodeJoggleEvents() {
-        List<Event> events = new LinkedList<>();
-
-        long base1 = System.currentTimeMillis();
-        long tbase1 = 1000;
-        long tbase2 = 5000;
-
-        int SIZE = 10;
-        // master / slave in sync
-        for (int i = 0;i < SIZE; i++) {
-            base1 += 1000;
-
-            Event e = new Event();
-            e.setData(new Object[] {"a", base1, "hadoop.namenode.fsnamesystemstate.numlivedatanodes", "namenode", "sandbox", String.valueOf(tbase1)});
-            events.add(e);
-
-            // inject b events, to test host a not disturb by this metric stream
-            e = new Event();
-            e.setData(new Object[] {"b", base1, "hadoop.namenode.fsnamesystemstate.numlivedatanodes", "namenode", "sandbox", String.valueOf(tbase2)});
-            events.add(e);
-        }
-
-        {
-            // insert an invalid
-            base1 += 1 * 60 * 1000;
-            tbase1 = 3000;
-            Event e = new Event();
-            e.setData(new Object[]{"a", base1, "hadoop.namenode.fsnamesystemstate.numlivedatanodes", "namenode", "sandbox", String.valueOf(tbase1)});
-            events.add(e);
-
-            // trigger event, we dont really care about this event value, just make sure above metri triggered
-            base1 = base1 + 100;
-            e = new Event();
-            e.setData(new Object[]{"b", base1, "hadoop.namenode.dfs.lastwrittentransactionid", "namenode", "sandbox", String.valueOf(tbase2)});
-            events.add(e);
-        }
-
-        return events;
-    }
-
-    @Test
-    public void testMissingBlocks() throws Exception {
-        String sql = " define stream s (host string, timestamp long, metric string, component string, site string, value double); " +
-                " @info(name='query') " +
-                " from s[metric == \"hadoop.namenode.dfs.missingblocks\" and convert(value, 'long') > 0]#window.externalTime(timestamp, 10 min) select metric, host, value, timestamp, component, site insert into tmp; ";
-
-        System.out.println(sql);
-
-        testQL(sql, generateMBEvents(), -1);
-    }
-
-    private List<Event> generateMBEvents() {
-        List<Event> events = new LinkedList<>();
-
-        long base1 = System.currentTimeMillis();
-        int SIZE = 3;
-        // master / slave in sync
-        for (int i = 0;i < SIZE; i++) {
-            base1 = base1 +1000;
-
-            Event e = new Event();
-            e.setData(new Object[] {"a", base1, "hadoop.namenode.dfs.missingblocks", "namenode", "sandbox", 0.0});
-            events.add(e);
-
-            // inject b events, to test host a not disturb by this metric stream
-            e = new Event();
-            e.setData(new Object[] {"b", base1, "hadoop.namenode.dfs.missingblocks", "namenode", "sandbox", 1.0});
-            events.add(e);
-        }
-        return events;
-    }
-
-    @Test
-    public void testLastCheckpointTime() throws Exception {
-        String ql = " define stream s (host string, timestamp long, metric string, component string, site string, value double); " +
-                " @info(name='query') " +
-                " from s[metric == \"hadoop.namenode.dfs.lastcheckpointtime\" and (convert(value, \"long\") + 18000000) < timestamp]#window.externalTime(timestamp ,10 min) select metric, host, value, timestamp, component, site insert into tmp;";
-
-        testQL(ql, generateLCPEvents(), -1);
-    }
-
-    private List<Event> generateLCPEvents() {
-        List<Event> events = new LinkedList<>();
-
-        long base1 = System.currentTimeMillis();
-        int SIZE = 3;
-        // master / slave in sync
-        for (int i = 0;i < SIZE; i++) {
-            base1 = base1 +1000;
-
-            Event e = new Event();
-            e.setData(new Object[] {"a", base1, "hadoop.namenode.dfs.lastcheckpointtime", "namenode", "sandbox", Double.valueOf(base1 - 18000000 - 1)});
-            events.add(e);
-
-            // inject b events, to test host a not disturb by this metric stream
-            e = new Event();
-            e.setData(new Object[] {"b", base1, "hadoop.namenode.dfs.lastcheckpointtime", "namenode", "sandbox", Double.valueOf(base1 - 18000000 - 1)});
-            events.add(e);
-        }
-        return events;
-    }
-
-    @Test
-    public void testNoActiveNamenodeFor3Times() throws Exception {
-        String sql = " define stream s (host string, timestamp long, metric string, component string, site string, value double); " +
-                " @info(name='query') " +
-                " from s[metric == \"hadoop.namenode.hastate.active.count\"]#window.length(3) select  metric, host, value, timestamp, component, site, avg(convert(value, \"long\")) as avgValue, count() as cnt having avgValue==0 and cnt==3  insert into tmp;";
-//        " from s[metric == \"hadoop.namenode.hastate.active.count\"]#window.length(3) select  metric, host, value, timestamp, component, site, min(convert(value, \"long\")) as minValue, max(convert(value, \"long\")) as maxValue, count() as cnt having minValue==0 and maxValue==0 and cnt==3  insert into tmp;";
-
-        System.out.println(sql);
-
-        testQL(sql, generateMBEvents_times_0(1), 0);
-        testQL(sql, generateMBEvents_times_0(2), 0);
-        testQL(sql, generateMBEvents_times_0(3), 1);
-    }
-
-    private List<Event> generateMBEvents_times_0(int times_0) {
-        List<Event> events = new LinkedList<>();
-
-        long base1 = System.currentTimeMillis();
-        double[] values = new double[3];
-        if(times_0 == 1){
-            values[0] = 1.0;
-            values[1] = 0.0;
-            values[2] = 1.0;
-        }else if(times_0 == 2){
-            values[0] = 1.0;
-            values[1] = 0.0;
-            values[2] = 0.0;
-        }else if(times_0 == 3){
-            values[0] = 0.0;
-            values[1] = 0.0;
-            values[2] = 0.0;
-        }
-        for(int i=0; i<3; i++) {
-            // master / slave in sync
-            base1 = base1 + 1000;
-            Event e = new Event();
-            e.setData(new Object[]{"a", base1, "hadoop.namenode.hastate.active.count", "namenode", "sandbox", values[i]});
-            events.add(e);
-        }
-        return events;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/46afec39/eagle-hadoop-metric/src/test/resources/cassandra.json
----------------------------------------------------------------------
diff --git a/eagle-hadoop-metric/src/test/resources/cassandra.json b/eagle-hadoop-metric/src/test/resources/cassandra.json
deleted file mode 100644
index 645e314..0000000
--- a/eagle-hadoop-metric/src/test/resources/cassandra.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "host": "/192.168.6.227",
-  "source": "/192.168.6.227",
-  "user": "jaspa",
-  "timestamp": 1455574202864,
-  "category": "QUERY",
-  "type": "CQL_SELECT",
-  "ks": "dg_keyspace",
-  "cf": "customer_details",
-  "operation": "CQL_SELECT",
-  "masked_columns": ["bank", "ccno", "email", "ip", "name", "sal", "ssn ", "tel", "url"],
-  "other_columns": ["id", "npi"]
-}
\ No newline at end of file


Mime
View raw message