ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From smoha...@apache.org
Subject ambari git commit: Updates for LOGSEARCH stack definition (Oliver Szabo via smohanty)
Date Fri, 04 Mar 2016 19:18:22 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-dev-logsearch 69adbd527 -> c63b0d042


Updates for LOGSEARCH stack definition (Oliver Szabo via smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c63b0d04
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c63b0d04
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c63b0d04

Branch: refs/heads/branch-dev-logsearch
Commit: c63b0d042f102452648ab18c2c6b811504cd3543
Parents: 69adbd5
Author: Sumit Mohanty <smohanty@hortonworks.com>
Authored: Fri Mar 4 11:18:02 2016 -0800
Committer: Sumit Mohanty <smohanty@hortonworks.com>
Committed: Fri Mar 4 11:18:02 2016 -0800

----------------------------------------------------------------------
 .../0.5.0/configuration/logfeeder-env.xml       |   1 -
 .../configuration/logfeeder-input-configs.xml   |  10 ++
 .../0.5.0/configuration/logfeeder-log4j.xml     |   3 +-
 .../0.5.0/configuration/logsearch-app-log4j.xml |  16 +-
 .../0.5.0/configuration/logsearch-config.xml    |  18 +-
 .../0.5.0/configuration/logsearch-env.xml       |   9 +
 .../0.5.0/configuration/solr-config.xml         |  11 +-
 .../LOGSEARCH/0.5.0/configuration/solr-env.xml  |   2 +-
 .../LOGSEARCH/0.5.0/metainfo.xml                |   2 -
 .../LOGSEARCH/0.5.0/metrics.json                |  59 -------
 .../0.5.0/package/scripts/logfeeder.py          |  23 +++
 .../0.5.0/package/scripts/logsearch.py          |  74 ++++++--
 .../LOGSEARCH/0.5.0/package/scripts/params.py   |  23 ++-
 .../LOGSEARCH/0.5.0/package/scripts/solr.py     |  24 ++-
 .../LOGSEARCH/0.5.0/widgets.json                | 173 -------------------
 15 files changed, 175 insertions(+), 273 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml
index 60272c9..6e03f44 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-env.xml
@@ -89,7 +89,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-logfeeder.checkpoint.folder={{logfeeder_pid_dir}}
 metrics.collector.hosts=http://{{metrics_collector_hosts}}:{{metrics_collector_port}}/ws/v1/timeline/metrics
     </value>
   </property>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-input-configs.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-input-configs.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-input-configs.xml
index fe7ab1f..8629fb5 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-input-configs.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-input-configs.xml
@@ -196,6 +196,11 @@
 			"path":"{{logfeeder_log_dir}}/logfeeder.log"
 		},
 		{
+			"type":"logsearch_perf",
+			"rowtype":"service",
+			"path":"{{logsearch_log_dir}}/logsearch-performance.log"
+		},
+		{
 			"type":"ranger_admin",
 			"rowtype":"service",
 			"path":"{{ranger_admin_log_dir}}/xa_portal.log"
@@ -635,6 +640,7 @@
 					"type":[
 						"logsearch_app",
 						"logsearch_feeder",
+						"logsearch_perf",
 						"ranger_admin",
 						"ranger_dbpatch"
 					]
@@ -924,6 +930,8 @@
 			"destination":"solr",
 			"zk_hosts":"{{zookeeper_quorum}}{{solr_znode}}",
 			"collection":"{{solr_collection_service_logs}}",
+			"number_of_shards": "{{logsearch_numshards}}",
+			"splits_interval_mins": "{{service_logs_collection_splits_interval_mins}}",
 			"conditions":{
 				"fields":{
 					"rowtype":[
@@ -941,6 +949,8 @@
 			"destination":"solr",
 			"zk_hosts":"{{zookeeper_quorum}}{{solr_znode}}",
 			"collection":"{{solr_collection_audit_logs}}",
+			"number_of_shards": "{{logsearch_numshards}}",
+			"splits_interval_mins": "{{audit_logs_collection_splits_interval_mins}}",
 			"conditions":{
 				"fields":{
 					"rowtype":[

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
index 92edb82..da0632b 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logfeeder-log4j.xml
@@ -54,8 +54,7 @@
   &lt;/appender&gt;
 
   &lt;appender name="rolling_file" class="org.apache.log4j.RollingFileAppender"&gt;

-    &lt;param name="file" value="{{logfeeder_log_dir}}/logfeeder.log" /&gt; 
-    &lt;param name="datePattern"  value="'.'yyyy-MM-dd" /&gt; 
+    &lt;param name="file" value="{{logfeeder_log_dir}}/logfeeder.log" /&gt;
     &lt;param name="append" value="true" /&gt; 
     &lt;param name="maxFileSize" value="10MB" /&gt; 
     &lt;param name="maxBackupIndex" value="10" /&gt; 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-app-log4j.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-app-log4j.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-app-log4j.xml
index bb2c902..002e2b2 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-app-log4j.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-app-log4j.xml
@@ -56,7 +56,6 @@
 
   &lt;appender name="rolling_file" class="org.apache.log4j.RollingFileAppender"&gt;

     &lt;param name="file" value="{{logsearch_log_dir}}/logsearch.log" /&gt; 
-    &lt;param name="datePattern"  value="'.'yyyy-MM-dd" /&gt; 
     &lt;param name="append" value="true" /&gt; 
     &lt;param name="maxFileSize" value="10MB" /&gt; 
     &lt;param name="maxBackupIndex" value="10" /&gt; 
@@ -65,6 +64,21 @@
     &lt;/layout&gt; 
   &lt;/appender&gt; 
 
+  &lt;appender name="performance_analyzer" class="org.apache.log4j.RollingFileAppender"&gt;
+    &lt;param name="file" value="{{logsearch_log_dir}}/logsearch-performance.log" /&gt;
+    &lt;param name="Threshold" value="info" /&gt;
+    &lt;param name="append" value="true" /&gt;
+    &lt;param name="maxFileSize" value="10MB" /&gt; 
+    &lt;param name="maxBackupIndex" value="10" /&gt; 
+    &lt;layout class="org.apache.log4j.PatternLayout"&gt;
+      &lt;param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" /&gt;
+    &lt;/layout&gt;
+  &lt;/appender&gt;
+  
+  &lt;logger name="org.apache.ambari.logsearch.performance" additivity="false"&gt;
+   &lt;appender-ref ref="performance_analyzer" /&gt;
+  &lt;/logger&gt;
+
   &lt;category name="org.apache.ambari.logsearch" additivity="false"&gt;
     &lt;priority value="info" /&gt;
     &lt;appender-ref ref="rolling_file" /&gt;

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-config.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-config.xml
index 0f89cf1..949223b 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-config.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-config.xml
@@ -22,6 +22,12 @@
 <configuration>
 
   <property>
+    <name>audit_logs_collection_split_interval_mins</name>
+    <value>15</value>
+    <description>Will switch the shard after the interval specified. Valid values are
none and greater than 1</description>
+  </property>
+
+  <property>
     <name>solr_collection_service_logs</name>
     <value>hadoop_logs</value>
     <description>Name for the service logs collection</description>
@@ -49,13 +55,19 @@
     <name>solr_audit_logs_zk_node</name>
     <value>{solr_znode}</value>
     <description>Only needed if using custom solr cloud. E.g. /audit_logs</description>
-  </property>   
+  </property>
+
+  <property>
+    <name>service_logs_collection_split_interval_mins</name>
+    <value>15</value>
+    <description>Will switch the shard after the interval specified. Valid values are none and greater than 1</description>
+  </property>
 
   <property>
     <name>logsearch_collection_numshards</name>
-    <value>1</value>
+    <value>10</value>
     <description>Number of shards for Solr collections</description>
-  </property>   
+  </property>
   
   <property>
     <name>logsearch_collection_rep_factor</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
index 8e9a63d..335beeb 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/logsearch-env.xml
@@ -88,10 +88,19 @@ solr.zkhosts={{zookeeper_quorum}}{{solr_znode}}
 solr.core.logs={{logsearch_collection_service_logs}}
 solr.core.history=history
 
+solr.service_logs.split_interval_mins={{service_logs_collection_splits_interval_mins}}
+solr.service_logs.shards={{logsearch_numshards}}
+solr.service_logs.replication_factor={{logsearch_repfactor}}
+
 #Audit logs
 auditlog.solr.zkhosts={{solr_audit_logs_zk_quorum}}{{solr_audit_logs_zk_node}}
 auditlog.solr.core.logs={{solr_collection_audit_logs}}
 auditlog.solr.url={{solr_audit_logs_url}}
+
+solr.audit_logs.split_interval_mins={{audit_logs_collection_splits_interval_mins}}
+solr.audit_logs.shards={{logsearch_numshards}}
+solr.audit_logs.replication_factor={{logsearch_repfactor}}
+
     </value>
   </property>
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-config.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-config.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-config.xml
index 2505632..0868c56 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-config.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-config.xml
@@ -28,26 +28,25 @@
     <name>solr.cloudmode</name>
     <value>true</value>
     <description>Whether Solr should be started in Cloud mode</description>
-  </property>  
+  </property>      -->
   
   <property>
     <name>solr.dir</name>
     <value>/opt/solr</value>
     <description>Path to Solr root. If HDPSearch is selected, it will be installed
under /opt/lucidworks-hdpsearch/solr/</description>
-  </property>  
+  </property>
 
   <property>
     <name>solr.download.location</name>
     <value>HDPSEARCH</value>
-    <description>Location to download Solr from (e.g. https://archive.apache.org/dist/lucene/solr/5.3.0/solr-5.3.0.tgz).
Set this to HDPSEARCH to download HDPSearch from yum repo instead</description>
-  </property>     -->
-  
+    <description>Location to download Solr from (e.g. http://apache.mirrors.lucidnetworks.net/lucene/solr/5.2.1/solr-5.2.1.tgz).
Set this to HDPSEARCH to download HDPSearch from yum repo instead</description>
+  </property>
 
   <property>
     <name>solr.znode</name>
     <value>/logsearch</value>
     <description>Zookeeper znode</description>
-  </property>  
+  </property>
 
   <property>
     <name>solr.minmem</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-env.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-env.xml
index a1d13ba..24e74c6 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-env.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/configuration/solr-env.xml
@@ -95,7 +95,7 @@ GC_TUNE="-XX:NewRatio=3 \
 ZK_HOST="{{zookeeper_quorum}}{{solr_znode}}"
 
 # Set the ZooKeeper client timeout (for SolrCloud mode)
-#ZK_CLIENT_TIMEOUT="15000"
+ZK_CLIENT_TIMEOUT="60000"
 
 # By default the start script uses "localhost"; override the hostname here
 # for production SolrCloud environments to control the hostname exposed to cluster state

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
index 3429304..167d18f 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metainfo.xml
@@ -111,8 +111,6 @@
         <config-type>logfeeder-config</config-type>
       </configuration-dependencies>
       <restartRequiredAfterChange>false</restartRequiredAfterChange>
-      <widgetsFileName>widgets.json</widgetsFileName>
-      <metricsFileName>metrics.json</metricsFileName>
     </service>
   </services>
 </metainfo>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
deleted file mode 100644
index a6c20c7..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/metrics.json
+++ /dev/null
@@ -1,59 +0,0 @@
-{
-  "LOGSEARCH_LOGFEEDER": {
-    "Component": [
-      {
-        "type": "ganglia",
-        "metrics": {
-          "default": {
-            "metrics/filter/error/grok": {
-              "metric": "filter.error.grok",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/filter/error/truncate": {
-              "metric": "filter.error.truncate",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/input/files/count": {
-              "metric": "input.files.count",
-              "pointInTime": true,
-              "temporal": true
-            },
-            "metrics/input/files/read_lines": {
-              "metric": "input.files.read_lines",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/input/files/read_bytes": {
-              "metric": "input.files.read_bytes",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/output/kafka/write_logs": {
-              "metric": "output.kafka.write_logs",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/output/kafka/write_bytes": {
-              "metric": "output.kafka.write_bytes",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/output/solr/write_logs": {
-              "metric": "output.solr.write_logs",
-              "pointInTime": false,
-              "temporal": true
-            },
-            "metrics/output/solr/write_bytes": {
-              "metric": "output.solr.write_bytes",
-              "pointInTime": false,
-              "temporal": true
-            }
-
-          }
-        }
-      }
-    ]
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
index 052ce52..194f71a 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logfeeder.py
@@ -66,6 +66,10 @@ class LogFeeder(Script):
     import params  
     if params.logfeeder_downloadlocation == 'RPM':
       Execute('rpm -ivh http://TBD.rpm')
+    elif len(params.logfeeder_downloadlocation) > 5 and params.logfeeder_downloadlocation[:5]
== 'file:' :
+      local_file = params.logfeeder_downloadlocation.replace(params.logfeeder_downloadlocation[:5],
'')
+      Execute('cd ' + params.logfeeder_dir + '; cp ' + local_file + ' .', user=params.logfeeder_user)
+      Execute('cd ' + params.logfeeder_dir + '; tar -xvf logsearch-logfeeder.tgz', user=params.logfeeder_user)
     else:  
       Execute('cd ' + params.logfeeder_dir + '; wget ' + params.logfeeder_downloadlocation
+ ' -O logfeeder.tar.gz -a ' + params.logfeeder_log, user=params.logfeeder_user)
       Execute('cd ' + params.logfeeder_dir + '; tar -xvf logfeeder.tar.gz', user=params.logfeeder_user)
   
@@ -73,7 +77,26 @@ class LogFeeder(Script):
     
   def configure(self, env, upgrade_type=None):
     import params
+    import status_params
     env.set_params(params)
+
+
+    #Duplicated here, because if the machine restarts /var/run folder is wiped out
+    Directory([params.logfeeder_log_dir, status_params.logfeeder_pid_dir, params.logfeeder_dir],
+              mode=0755,
+              cd_access='a',
+              owner=params.logfeeder_user,
+              group=params.logfeeder_group,
+              create_parents=True
+              )
+
+
+    File(params.logfeeder_log,
+         mode=0644,
+         owner=params.logfeeder_user,
+         group=params.logfeeder_group,
+         content=''
+         )
     
     #write content in jinja text field to system.properties
     env_content=InlineTemplate(params.logfeeder_env_content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
index 3f4f02e..944359d 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/logsearch.py
@@ -17,7 +17,7 @@ limitations under the License.
 
 """
 
-import sys, os, pwd, grp, signal, time
+import sys, os, pwd, grp, signal, time, random
 from resource_management import *
 
 class LogSearch(Script):
@@ -66,6 +66,10 @@ class LogSearch(Script):
     import params
     if params.logsearch_downloadlocation == 'RPM':
       Execute('rpm -ivh http://s3.amazonaws.com/dev2.hortonworks.com/ashishujjain/logsearch/logsearch_2_3_2_0_2950-0.0.1.2.3.2.0-2950.el6.x86_64.rpm')
+    elif len(params.logsearch_downloadlocation) > 5 and params.logsearch_downloadlocation[:5]
== 'file:':
+      local_file = params.logsearch_downloadlocation.replace(params.logsearch_downloadlocation[:5],'')
+      Execute('cd ' + params.logsearch_dir + '; cp ' + local_file + ' .', user=params.logsearch_user)
+      Execute('cd ' + params.logsearch_dir + '; tar -xvf logsearch-portal.tar.gz', user=params.logsearch_user)
     else:
       Execute('cd ' + params.logsearch_dir + '; wget ' + params.logsearch_downloadlocation
+ ' -O logsearch-portal.tar.gz -a ' + params.logsearch_log, user=params.logsearch_user)
       Execute('cd ' + params.logsearch_dir + '; tar -xvf logsearch-portal.tar.gz', user=params.logsearch_user)
@@ -73,8 +77,27 @@ class LogSearch(Script):
 
   def configure(self, env, upgrade_type=None):
     import params
+    import status_params
+
     env.set_params(params)
 
+    #Duplicated in configure, because if the machine restart /var/run folder is deleted
+    Directory([params.logsearch_log_dir, status_params.logsearch_pid_dir, params.logsearch_dir],
+              mode=0755,
+              cd_access='a',
+              owner=params.logsearch_user,
+              group=params.logsearch_group,
+              create_parents=True
+              )
+
+
+    File(params.logsearch_log,
+         mode=0644,
+         owner=params.logsearch_user,
+         group=params.logsearch_group,
+         content=''
+         )
+
     #write content in jinja text field to system.properties
     env_content=InlineTemplate(params.logsearch_env_content)    
     File(format("{params.logsearch_dir}/classes/system.properties"), content=env_content,
owner=params.logsearch_user)    
@@ -124,29 +147,54 @@ class LogSearch(Script):
     Execute('echo mapred_log_dir_prefix '+params.mapred_log_dir_prefix+' >> ' + params.logsearch_log,
user=params.logsearch_user)
     Execute('echo zk_log_dir '+params.zk_log_dir+' >> ' + params.logsearch_log, user=params.logsearch_user)
 
-    
+    my_random = random.random()
+
+    #Check whether we need to add service log config to zookeeper.
+    tmp_folder='/tmp/solr_config_hadoop_logs_' + str(my_random)
+    cmd = format('{cloud_scripts}/zkcli.sh -zkhost {zookeeper_quorum}{solr_znode} -cmd downconfig
-confdir ' + tmp_folder + ' -confname hadoop_logs')
+    Execute(cmd, ignore_failures=True)
+    if not os.path.exists( tmp_folder ):
+      Execute ('echo "Adding config for service logs"')
+      #Adding service logs config to zookeeper
+      cmd = format('{cloud_scripts}/zkcli.sh -zkhost {zookeeper_quorum}{solr_znode} -cmd
upconfig -confdir {logsearch_dir}/solr_configsets/hadoop_logs/conf -confname hadoop_logs')
+      Execute(cmd)
+    else:
+      Execute ('echo "Config for hadoop_logs already present in zookeeper. Will not add it"')
+
     #create prerequisite Solr collections, if not already exist
     #cmd = params.solr_bindir+'solr create -c '+params.logsearch_collection_service_logs+'
-d '+params.logsearch_dir+'/solr_configsets/hadoop_logs/conf -s '+params.logsearch_numshards+'
-rf ' + params.logsearch_repfactor    
-    cmd = format('SOLR_INCLUDE={logsearch_solr_conf}/solr.in.sh {solr_bindir}/solr create
-c {solr_collection_service_logs} -d {logsearch_dir}/solr_configsets/hadoop_logs/conf -s {logsearch_numshards}
-rf {logsearch_repfactor}')
-    Execute('echo '  + cmd)
-    Execute(cmd, ignore_failures=True)
+    #cmd = format('SOLR_INCLUDE={logsearch_solr_conf}/solr.in.sh {solr_bindir}/solr create
-c {solr_collection_service_logs} -d {logsearch_dir}/solr_configsets/hadoop_logs/conf -s {logsearch_numshards}
-rf {logsearch_repfactor}')
+    #Execute('echo '  + cmd)
+    #Execute(cmd, ignore_failures=True)
 
     #cmd = params.solr_bindir+'solr create -c history -d '+params.logsearch_dir+'/solr_configsets/history/conf
-s '+params.logsearch_numshards+' -rf ' + params.logsearch_repfactor
     cmd = format('SOLR_INCLUDE={logsearch_solr_conf}/solr.in.sh {solr_bindir}/solr create
-c history -d {logsearch_dir}/solr_configsets/history/conf -s {logsearch_numshards} -rf {logsearch_repfactor}')
     Execute('echo '  + cmd)
     Execute(cmd, ignore_failures=True)
 
-    if not(params.solr_audit_logs_use_ranger):
-      cmd = format('SOLR_INCLUDE={logsearch_solr_conf}/solr.in.sh {solr_bindir}/solr create
-c {solr_collection_audit_logs} -d {logsearch_dir}/solr_configsets/audit_logs/conf -s {logsearch_numshards}
-rf {logsearch_repfactor}')
-      Execute('echo '  + cmd)
-      Execute(cmd, ignore_failures=True)
-    						 
+    #Check whether we need to add service log config to zookeeper.
+    tmp_folder='/tmp/solr_config_audit_logs_' + str(my_random)
+    cmd = format('{cloud_scripts}/zkcli.sh -zkhost {zookeeper_quorum}{solr_znode} -cmd downconfig
-confdir ' + tmp_folder + ' -confname audit_logs')
+    Execute(cmd, ignore_failures=True)
+    if not os.path.exists( tmp_folder ):
+      Execute ('echo "Adding config for audit_logs"')
+      #Adding service logs config to zookeeper
+      cmd = format('{cloud_scripts}/zkcli.sh -zkhost {zookeeper_quorum}{solr_znode} -cmd
upconfig -confdir {logsearch_dir}/solr_configsets/audit_logs/conf -confname audit_logs')
+      Execute(cmd)
+    else:
+      Execute ('echo "Config for audit_logs already present in zookeeper. Will not add it"')
+
+#    if not(params.solr_audit_logs_use_ranger):
+#      cmd = format('SOLR_INCLUDE={logsearch_solr_conf}/solr.in.sh {solr_bindir}/solr create
-c {solr_collection_audit_logs} -d {logsearch_dir}/solr_configsets/audit_logs/conf -s {logsearch_numshards}
-rf {logsearch_repfactor}')
+#      Execute('echo '  + cmd)
+#      Execute(cmd, ignore_failures=True)
+
     Execute('chmod -R ugo+r ' + params.logsearch_dir + '/solr_configsets')
-    
+
     Execute('find '+params.service_packagedir+' -iname "*.sh" | xargs chmod +x')
     cmd = params.service_packagedir + '/scripts/start_logsearch.sh ' + params.logsearch_dir
+ ' ' + params.logsearch_log + ' ' + status_params.logsearch_pid_file + ' ' + params.java64_home
+ ' ' + '-Xmx' + params.logsearch_app_max_mem
-  
-    Execute('echo "Running cmd: ' + cmd + '"')    
+
+    Execute('echo "Running cmd: ' + cmd + '"')
     Execute(cmd, user=params.logsearch_user)
 
   #Called to stop the service using the pidfile

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
index 3289708..a350f8c 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/params.py
@@ -68,18 +68,17 @@ smart_solr_datadir = config['configurations']['alpha-smart-config']['solr_datadi
 #Solr configs
 #####################################
 
-# Only supporting HDPsearch and SolrCloud mode - so hardcode those options
+# Only supporting SolrCloud mode - so hardcode those option
 solr_cloudmode='true'
-solr_downloadlocation='HDPSEARCH'
+solr_downloadlocation=config['configurations']['solr-config']['solr.download.location']
+solr_dir = config['configurations']['solr-config']['solr.dir']
 #solr_cloudmode = config['configurations']['solr-config']['solr.cloudmode']
-#solr_downloadlocation = config['configurations']['solr-config']['solr.download.location']
-#solr_dir = config['configurations']['solr-config']['solr.dir']
 
 solr_znode = config['configurations']['solr-config']['solr.znode']
 solr_port = config['configurations']['solr-env']['solr.port']
 solr_min_mem = format(config['configurations']['solr-config']['solr.minmem'])
 solr_max_mem = format(config['configurations']['solr-config']['solr.maxmem'])
-solr_instance_count = len(config['clusterHostInfo']['zookeeper_hosts'])
+solr_instance_count = len(config['clusterHostInfo']['logsearch_solr_hosts'])
 logsearch_solr_conf = config['configurations']['solr-config']['logsearch.solr.conf']
 logsearch_solr_datadir = format(config['configurations']['solr-config']['logsearch.solr.datadir'])
 logsearch_solr_data_resources_dir = os.path.join(logsearch_solr_datadir,'resources')
@@ -87,6 +86,9 @@ logsearch_service_logs_max_retention = config['configurations']['logsearch-confi
 logsearch_audit_logs_max_retention = config['configurations']['logsearch-config']['logsearch_audit_logs_max_retention']
 logsearch_app_max_mem = config['configurations']['logsearch-config']['logsearch_app_max_mem']
 
+audit_logs_collection_splits_interval_mins = config['configurations']['logsearch-config']['audit_logs_collection_split_interval_mins']
+service_logs_collection_splits_interval_mins = config['configurations']['logsearch-config']['service_logs_collection_split_interval_mins']
+
 zookeeper_port=default('/configurations/zoo.cfg/clientPort', None)
 #get comma separated list of zookeeper hosts from clusterHostInfo
 index = 0 
@@ -135,13 +137,16 @@ if logsearch_downloadlocation == 'RPM':
 else:  
   logsearch_dir = config['configurations']['logsearch-env']['logsearch_dir']
 
-
-
 logsearch_downloadlocation = config['configurations']['logsearch-env']['logsearch_download_location']
 logsearch_collection_service_logs = default('/configurations/logsearch-config/logsearch_collection_service_logs',
'hadoop_logs')
 logsearch_collection_audit_logs = default('/configurations/logsearch-config/logsearch_collection_audit_logs',
'audit_logs')
-#logsearch_numshards = str(config['configurations']['logsearch-config']['logsearch_collection_numshards'])
-logsearch_numshards = format(str(solr_instance_count))
+logsearch_numshards_config = config['configurations']['logsearch-config']['logsearch_collection_numshards']
+
+if logsearch_numshards_config > 0:
+  logsearch_numshards = str(logsearch_numshards_config)
+else:
+  logsearch_numshards = format(str(solr_instance_count))
+
 logsearch_repfactor = str(config['configurations']['logsearch-config']['logsearch_collection_rep_factor'])
 
 solr_collection_service_logs = default('/configurations/logsearch-config/solr_collection_service_logs',
'hadoop_logs')

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/solr.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/solr.py
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/solr.py
index ec8eeb2..3dc2a2e 100644
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/solr.py
+++ b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/package/scripts/solr.py
@@ -35,7 +35,7 @@ class Solr(Script):
     Execute('find '+params.service_packagedir+' -iname "*.sh" | xargs chmod +x')
 
     try: grp.getgrnam(params.solr_group)
-    except KeyError: Group(group_name=params.solr_group) 
+    except KeyError: Group(group_name=params.solr_group)
     
     try: pwd.getpwnam(params.solr_user)
     except KeyError: User(username=params.solr_user, 
@@ -81,7 +81,7 @@ class Solr(Script):
       Execute('cd ' + params.solr_dir + '; ln -s solr-* latest', user=params.solr_user)
     
     #ensure all solr files owned   by solr
-    Execute('chown -R '+params.solr_user + ':' + params.solr_group + ' ' + params.solr_dir)
           
+    Execute('chown -R '+ params.solr_user + ':' + params.solr_group + ' ' + params.solr_dir)
        
     Execute ('echo "Solr install complete"')
 
@@ -89,8 +89,26 @@ class Solr(Script):
 
   def configure(self, env, upgrade_type=None):
     import params
+    import status_params
+
     env.set_params(params)
-    
+
+    #This is duplicated because if the machine restarts, the /var/run folder is deleted
+    Directory([params.solr_log_dir, status_params.solr_piddir, params.solr_dir, params.logsearch_solr_conf,
params.logsearch_solr_datadir, params.logsearch_solr_data_resources_dir],
+              mode=0755,
+              cd_access='a',
+              owner=params.solr_user,
+              group=params.solr_group,
+              create_parents=True
+              )
+
+    File(params.solr_log,
+         mode=0644,
+         owner=params.solr_user,
+         group=params.solr_group,
+         content=''
+         )
+
     #write content in jinja text field to solr.in.sh
     env_content=InlineTemplate(params.solr_env_content)
     File(format("{logsearch_solr_conf}/solr.in.sh"), content=env_content, owner=params.solr_user)
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/c63b0d04/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/widgets.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/widgets.json
b/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/widgets.json
deleted file mode 100644
index 379122a..0000000
--- a/ambari-server/src/main/resources/common-services/LOGSEARCH/0.5.0/widgets.json
+++ /dev/null
@@ -1,173 +0,0 @@
-{
-    "layouts": [
-	{
-	    "layout_name": "default_logsearch_dashboard",
-	    "display_name": "Logsearch Dashboard",
-	    "section_name": "LOGSEARCH_SUMMARY",
-	    "widgetLayoutInfo": [
-		{
-		    "widget_name": "Files Monitoring",
-		    "description": "Files that are been monitored",
-		    "widget_type": "NUMBER",
-		    "is_visible": true,
-		    "metrics": [
-			{
-			    "name": "input.files.count",
-			    "metric_path": "metrics/input/files/count",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			}
-		    ],
-		    "values": [
-			{
-			    "name": "Files count",
-			    "value": "${input.files.count}"
-			}
-		    ],
-		    "properties": {
-		    }
-		},
-		{
-		    "widget_name": "Logs Published",
-		    "description": "Logs Published",
-		    "widget_type": "GRAPH",
-		    "is_visible": true,
-		    "metrics": [
-			{
-			    "name": "output.solr.write_logs._sum",
-			    "metric_path": "metrics/output/solr/write_logs",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			},
-			{
-			    "name": "output.kafka.write_logs._sum",
-			    "metric_path": "metrics/output/kafka/write_logs",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			}
-		    ],
-		    "values": [
-			{
-			    "name": "Logs to Solr",
-			    "value": "${output.solr.write_logs._sum/1000}"
-			},
-			{
-			    "name": "Logs to Kafka",
-			    "value": "${output.kafka.write_logs._sum/1000}"
-			}
-		    ],
-		    "properties": {
-			"display_unit": "K",
-			"graph_type": "LINE",
-			"time_range": "1"
-		    }
-		},
-		{
-		    "widget_name": "Lines Read",
-		    "description": "Lines Read",
-		    "widget_type": "GRAPH",
-		    "is_visible": true,
-		    "metrics": [
-			{
-			    "name": "input.files.read_lines._sum",
-			    "metric_path": "metrics/input/files/read_lines",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			}
-
-		    ],
-		    "values": [
-			{
-			    "name": "Lines read from File",
-			    "value": "${input.files.read_lines._sum/1024}"
-			}
-		    ],
-		    "properties": {
-			"display_unit": "K",
-			"graph_type": "LINE",
-			"time_range": "1"
-		    }
-		},
-		{
-		    "widget_name": "Read/Write Bytes",
-		    "description": "Bytes read and written",
-		    "widget_type": "GRAPH",
-		    "is_visible": true,
-		    "metrics": [
-			{
-			    "name": "input.files.read_bytes._sum",
-			    "metric_path": "metrics/input/files/read_bytes",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			},
-			{
-			    "name": "output.solr.write_bytes._sum",
-			    "metric_path": "metrics/output/solr/write_bytes",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			},
-			{
-			    "name": "output.kafka.write_bytes._sum",
-			    "metric_path": "metrics/output/kafka/write_bytes",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			}
-		    ],
-		    "values": [
-			{
-			    "name": "Data read from File",
-			    "value": "${input.files.read_bytes._sum/(1024*1024)}"
-			},
-			{
-			    "name": "Data sent to Solr",
-			    "value": "${output.solr.write_bytes._sum/(1024*1024)}"
-			},
-			{
-			    "name": "Data sent to Kafka",
-			    "value": "${output.kafka.write_bytes._sum/(1024*1024)}"
-			}
-		    ],
-		    "properties": {
-			"display_unit": "MB",
-			"graph_type": "LINE",
-			"time_range": "1"
-		    }
-		},
-		{
-		    "widget_name": "Parse Errors",
-		    "description": "Parse Errors",
-		    "widget_type": "GRAPH",
-		    "is_visible": true,
-		    "metrics": [
-			{
-			    "name": "filter.error.grok._sum",
-			    "metric_path": "metrics/filter/error/grok",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			},
-			{
-			    "name": "filter.error.truncate._sum",
-			    "metric_path": "metrics/filter/error/truncate",
-			    "service_name": "LOGSEARCH",
-			    "component_name": "LOGSEARCH_LOGFEEDER"
-			}
-		    ],
-		    "values": [
-			{
-			    "name": "Grok Errors",
-			    "value": "${filter.error.grok._sum}"
-			},
-			{
-			    "name": "Logs Truncated",
-			    "value": "${filter.error.truncate._sum}"
-			}
-		    ],
-		    "properties": {
-			"graph_type": "LINE",
-			"time_range": "1"
-		    }
-		}
-	    ]
-	}
-    ]
-}


Mime
View raw message