ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From oleew...@apache.org
Subject [5/5] ambari git commit: AMBARI-17653. ACL support for Solr Znode (oleewere)
Date Tue, 12 Jul 2016 15:25:11 GMT
AMBARI-17653. ACL support for Solr Znode (oleewere)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/0e73da6e
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/0e73da6e
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/0e73da6e

Branch: refs/heads/trunk
Commit: 0e73da6efddf15aaae724ac7665764a25a713497
Parents: b3226b3
Author: oleewere <oleewere@gmail.com>
Authored: Tue Jul 12 17:22:25 2016 +0200
Committer: oleewere <oleewere@gmail.com>
Committed: Tue Jul 12 17:22:25 2016 +0200

----------------------------------------------------------------------
 .../libraries/functions/solr_cloud_util.py      |   85 +-
 .../src/main/resources/config.json.j2           | 1004 ----------
 .../src/main/resources/global.config.json.j2    |   28 -
 .../src/main/resources/input.config.json.j2     |  284 ---
 .../src/main/resources/log4j.xml.j2             |   60 -
 .../src/main/resources/output.config.json.j2    |   97 -
 .../src/main/scripts/run.sh.j2                  |   83 -
 .../audit_logs/conf/solrconfig.xml.j2           | 1887 ------------------
 .../configsets/audit_logs/core.properties.j2    |   20 -
 .../hadoop_logs/conf/solrconfig.xml.j2          | 1887 ------------------
 .../src/main/resources/log4j.xml.j2             |   82 -
 .../src/main/resources/logsearch.properties.j2  |   38 -
 .../src/main/scripts/run.sh.j2                  |   86 -
 .../ambari-logsearch-solr-client/pom.xml        |    4 +
 .../logsearch/solr/AmbariSolrCloudCLI.java      |  172 +-
 .../logsearch/solr/AmbariSolrCloudClient.java   |  136 +-
 .../solr/AmbariSolrCloudClientBuilder.java      |   36 +
 .../commands/AbstractStateFileZkCommand.java    |   42 +
 .../AbstractZookeeperConfigCommand.java         |   44 +
 .../commands/AbstractZookeeperRetryCommand.java |   12 +-
 .../solr/commands/CheckConfigZkCommand.java     |    4 +-
 .../solr/commands/CheckZnodeZkCommand.java      |   45 +
 .../solr/commands/CopyZnodeZkCommand.java       |   79 +
 .../solr/commands/CreateSaslUsersZkCommand.java |   61 +
 .../solr/commands/CreateSolrZnodeZkCommand.java |   42 +
 .../solr/commands/DownloadConfigZkCommand.java  |   15 +-
 .../EnableKerberosPluginSolrZkCommand.java      |   75 +
 .../solr/commands/GetStateFileZkCommand.java    |   43 +
 .../solr/commands/SecureZNodeZkCommand.java     |   48 +
 .../commands/SetClusterPropertyZkCommand.java   |   40 +
 .../solr/commands/UpdateStateFileZkCommand.java |   84 +
 .../solr/commands/UploadConfigZkCommand.java    |    6 +-
 .../logsearch/solr/domain/AmbariSolrState.java  |   26 +
 .../ambari/logsearch/solr/util/AclUtils.java    |   71 +
 .../src/main/resources/log4j.properties         |   11 +-
 .../ATLAS/0.1.0.2.3/package/scripts/metadata.py |   32 +-
 .../0.5.0/configuration/logsearch-solr-env.xml  |    4 +-
 .../0.5.0/package/scripts/service_check.py      |    2 +-
 .../0.5.0/package/scripts/setup_logsearch.py    |    3 +-
 .../package/scripts/setup_logsearch_solr.py     |   45 +-
 .../0.4.0/package/scripts/setup_ranger_xml.py   |   16 +-
 .../stacks/2.3/ATLAS/test_metadata_server.py    |    1 +
 .../python/stacks/2.4/LOGSEARCH/test_solr.py    |   16 +-
 .../stacks/2.5/ATLAS/test_atlas_server.py       |    2 +-
 .../stacks/2.5/RANGER/test_ranger_admin.py      |    2 +
 45 files changed, 1214 insertions(+), 5646 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
index 4ecddf2..82db1bb 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/solr_cloud_util.py
@@ -23,11 +23,16 @@ from resource_management.core.resources.system import Directory, Execute, File
 from resource_management.core.shell import as_user
 from resource_management.core.source import StaticFile, InlineTemplate
 
-__all__ = ["upload_configuration_to_zk", "create_collection"]
+__all__ = ["upload_configuration_to_zk", "create_collection", "setup_kerberos", "set_cluster_prop",
+           "setup_kerberos_plugin", "create_znode", "check_znode", "create_sasl_users"]
 
-def __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home):
+def __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, separated_znode=False):
   solr_cli_prefix = format('export JAVA_HOME={java64_home} ; /usr/lib/ambari-logsearch-solr-client/solrCloudCli.sh ' \
-                           '--zookeeper-connect-string {zookeeper_quorum}{solr_znode}')
+                           '--zookeeper-connect-string {zookeeper_quorum}')
+  if separated_znode:
+    solr_cli_prefix+=format(' --znode {solr_znode}')
+  else:
+    solr_cli_prefix+=format('{solr_znode}')
   return solr_cli_prefix
 
 def __append_flags_if_exists(command, flagsDict):
@@ -38,7 +43,7 @@ def __append_flags_if_exists(command, flagsDict):
 
 
 def upload_configuration_to_zk(zookeeper_quorum, solr_znode, config_set, config_set_dir, tmp_dir,
-                         java64_home, user, retry = 5, interval = 10, solrconfig_content = None):
+                         java64_home, user, retry = 5, interval = 10, solrconfig_content = None, jaas_file=None):
   """
   Upload configuration set to zookeeper with solrCloudCli.sh
   At first, it tries to download configuration set if exists into a temporary location, then upload that one to
@@ -52,6 +57,8 @@ def upload_configuration_to_zk(zookeeper_quorum, solr_znode, config_set, config_
           only_if=as_user(format("{solr_cli_prefix} --check-config --config-set {config_set} --retry {retry} --interval {interval}"), user),
           user=user
           )
+  appendableDict = {}
+  appendableDict["--jaas-file"] = jaas_file
 
   if solrconfig_content is not None:
       File(format("{tmp_config_set_dir}/solrconfig.xml"),
@@ -59,15 +66,15 @@ def upload_configuration_to_zk(zookeeper_quorum, solr_znode, config_set, config_
        owner=user,
        only_if=format("test -d {tmp_config_set_dir}")
       )
-
-      Execute(format(
-        '{solr_cli_prefix} --upload-config --config-dir {tmp_config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}'),
+      upload_tmp_config_cmd = format('{solr_cli_prefix} --upload-config --config-dir {tmp_config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}')
+      upload_tmp_config_cmd = __append_flags_if_exists(upload_tmp_config_cmd, appendableDict)
+      Execute(upload_tmp_config_cmd,
         user=user,
         only_if=format("test -d {tmp_config_set_dir}")
       )
-
-  Execute(format(
-    '{solr_cli_prefix} --upload-config --config-dir {config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}'),
+  upload_config_cmd = format('{solr_cli_prefix} --upload-config --config-dir {config_set_dir} --config-set {config_set} --retry {retry} --interval {interval}')
+  upload_config_cmd = __append_flags_if_exists(upload_config_cmd, appendableDict)
+  Execute(upload_config_cmd,
     user=user,
     not_if=format("test -d {tmp_config_set_dir}")
   )
@@ -100,7 +107,6 @@ def create_collection(zookeeper_quorum, solr_znode, collection, config_set, java
   create_collection_cmd = format('{solr_cli_prefix} --create-collection --collection {collection} --config-set {config_set} '\
                                  '--shards {shards} --replication {replication_factor} --max-shards {max_shards} --retry {retry} '\
                                  '--interval {interval} --no-sharding')
-
   appendableDict = {}
   appendableDict["--router-name"] = router_name
   appendableDict["--router-field"] = router_field
@@ -114,9 +120,64 @@ def create_collection(zookeeper_quorum, solr_znode, collection, config_set, java
   create_collection_cmd = __append_flags_if_exists(create_collection_cmd, appendableDict)
   create_collection_cmd = format(create_collection_cmd, key_store_password_param=key_store_password, trust_store_password_param=trust_store_password)
 
-
   Execute(create_collection_cmd, user=user)
 
+def setup_kerberos(zookeeper_quorum, solr_znode, copy_from_znode, java64_home, user, secure=False, jaas_file=None):
+  """
+  Copy all unsecured (or secured) Znode content to a secured (or unsecured) Znode,
+  and restrict the world permissions there.
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
+  setup_kerberos_cmd = format('{solr_cli_prefix} --setup-kerberos --copy-from-znode {copy_from_znode}')
+  if secure and jaas_file is not None:
+    setup_kerberos_cmd+=format(' --secure --jaas-file {jaas_file}')
+  Execute(setup_kerberos_cmd, user=user)
+
+def check_znode(zookeeper_quorum, solr_znode, java64_home, user, retry = 5, interval = 10):
+  """
+  Check whether the znode exists; throws an exception if it is not accessible.
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
+  check_znode_cmd = format('{solr_cli_prefix} --check-znode --retry {retry} --interval {interval}')
+  Execute(check_znode_cmd, user=user)
+
+def create_znode(zookeeper_quorum, solr_znode, java64_home, user, retry = 5 , interval = 10):
+  """
+  Create the znode if it does not exist; throws an exception if zookeeper is not accessible.
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
+  create_znode_cmd = format('{solr_cli_prefix} --create-znode --retry {retry} --interval {interval}')
+  Execute(create_znode_cmd, user=user)
+
+def setup_kerberos_plugin(zookeeper_quorum, solr_znode, java64_home, user, secure=False, jaas_file = None):
+  """
+  Set Kerberos plugin on the Solr znode in security.json, if secure is False, then clear the security.json
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
+  setup_kerberos_plugin_cmd = format('{solr_cli_prefix} --setup-kerberos-plugin')
+  if secure and jaas_file is not None:
+    setup_kerberos_plugin_cmd+=format(' --jaas-file {jaas_file} --secure')
+  Execute(setup_kerberos_plugin_cmd, user=user)
+
+def set_cluster_prop(zookeeper_quorum, solr_znode, prop_name, prop_value, java64_home, user = None, jaas_file = None):
+  """
+  Set a cluster property on the Solr znode in clusterprops.json
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home)
+  set_cluster_prop_cmd = format('{solr_cli_prefix} --cluster-prop --property-name {prop_name} --property-value {prop_value}')
+  if jaas_file is not None:
+    set_cluster_prop_cmd+=format(' --jaas-file {jaas_file}')
+  Execute(set_cluster_prop_cmd, user=user)
+
+def create_sasl_users(zookeeper_quorum, solr_znode, jaas_file, java64_home, user, sasl_users=[]):
+  """
+  Add list of sasl users to a znode
+  """
+  solr_cli_prefix = __create_solr_cloud_cli_prefix(zookeeper_quorum, solr_znode, java64_home, True)
+  sasl_users_str = ",".join(str(x) for x in sasl_users)
+  create_sasl_users_cmd = format('{solr_cli_prefix} --create-sasl-users --jaas-file {jaas_file} --sasl-users {sasl_users_str}')
+  Execute(create_sasl_users_cmd, user=user)
+
 def setup_solr_client(config, user = None, group = None, custom_log4j = True, custom_log_location = None, log4jcontent = None):
     solr_user = config['configurations']['logsearch-solr-env']['logsearch_solr_user'] if user is None else user
     solr_group = config['configurations']['cluster-env']['user_group'] if group is None else group

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
deleted file mode 100644
index 3428dd8..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/config.json.j2
+++ /dev/null
@@ -1,1004 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-	"global":{
-		"add_fields":{
-			"cluster":"{{cluster_name}}"
-		},
-		"source":"file",
-		"tail":"true",
-		"gen_event_md5":"true",
-		"start_position":"beginning"
-	},
-	"input":[
-		{
-			"type":"accumulo_gc",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/gc_*.log"
-		},
-		{
-			"type":"accumulo_master",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/master_*.log"
-		},
-		{
-			"type":"accumulo_monitor",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/monitor_*.log"
-		},
-		{
-			"type":"accumulo_tracer",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/tracer_*.log"
-		},
-		{
-			"type":"accumulo_tserver",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/tserver_*.log"
-		},
-		{
-			"type":"atlas_app",
-			"rowtype":"service",
-			"path":"{{atlas_log_dir}}/application.log"
-		},
-		{
-			"type":"ambari_agent",
-			"rowtype":"service",
-			"path":"{{ambari_agent_log_dir}}/ambari-agent.log"
-		},
-		{
-			"type":"ambari_server",
-			"rowtype":"service",
-			"path":"{{ambari_server_log_dir}}/ambari-server.log"
-		},
-		{
-			"type":"ams_hbase_master",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/hbase-ams-master-*.log"
-		},
-		{
-			"type":"ams_hbase_regionserver",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log"
-		},
-		{
-			"type":"ams_collector",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/ambari-metrics-collector.log"
-		},
-		{
-			"type":"falcon_app",
-			"rowtype":"service",
-			"path":"{{falcon_log_dir}}/falcon.application.log"
-		},
-		{
-			"type":"hbase_master",
-			"rowtype":"service",
-			"path":"{{hbase_log_dir}}/hbase-hbase-master-*.log"
-		},
-		{
-			"type":"hbase_regionserver",
-			"rowtype":"service",
-			"path":"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log"
-		},
-		{
-			"type":"hdfs_datanode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log"
-		},
-		{
-			"type":"hdfs_namenode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log"
-		},
-		{
-			"type":"hdfs_journalnode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log"
-		},
-		{
-			"type":"hdfs_secondarynamenode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log"
-		},
-		{
-			"type":"hdfs_zkfc",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log"
-		},
-		{
-			"type":"hive_hiveserver2",
-			"rowtype":"service",
-			"path":"{{hive_log_dir}}/hiveserver2.log"
-		},
-		{
-			"type":"hive_metastore",
-			"rowtype":"service",
-			"path":"{{hive_log_dir}}/hivemetastore.log"
-		},
-		{
-			"type":"kafka_controller",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/controller.log"
-		},
-		{
-			"type":"kafka_request",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/kafka-request.log"
-		},
-		{
-			"type":"kafka_logcleaner",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/log-cleaner.log"
-		},
-		{
-			"type":"kafka_server",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/server.log"
-		},
-		{
-			"type":"kafka_statechange",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/state-change.log"
-		},
-		{
-			"type":"knox_gateway",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/gateway.log"
-		},
-		{
-			"type":"knox_cli",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/knoxcli.log"
-		},
-		{
-			"type":"knox_ldap",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/ldap.log"
-		},
-		{
-			"type":"mapred_historyserver",
-			"rowtype":"service",
-			"path":"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log"
-		},
-		{
-			"type":"logsearch_app",
-			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch.json"
-		},
-		{
-			"type":"logsearch_feeder",
-			"rowtype":"service",
-			"path":"{{logfeeder_log_dir}}/logfeeder.json"
-		},
-		{
-			"type":"logsearch_perf",
-			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch-performance.json"
-		},
-		{
-			"type":"ranger_admin",
-			"rowtype":"service",
-			"path":"{{ranger_admin_log_dir}}/xa_portal.log"
-		},
-		{
-			"type":"ranger_dbpatch",
-			"is_enabled":"true",
-			"path":"{{ranger_admin_log_dir}}/ranger_db_patch.log"
-		},
-		{
-			"type":"ranger_kms",
-			"rowtype":"service",
-			"path":"{{ranger_kms_log_dir}}/kms.log"
-		},
-		{
-			"type":"ranger_usersync",
-			"rowtype":"service",
-			"path":"{{ranger_usersync_log_dir}}/usersync.log"
-		},
-		{
-			"type":"oozie_app",
-			"rowtype":"service",
-			"path":"{{oozie_log_dir}}/oozie.log"
-		},
-		{
-			"type":"yarn_nodemanager",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log"
-		},
-		{
-			"type":"yarn_resourcemanager",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log"
-		},
-		{
-			"type":"yarn_timelineserver",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log"
-		},
-		{
-			"type":"yarn_historyserver",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log"
-		},
-		{
-			"type":"yarn_jobsummary",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log"
-		},
-		{
-			"type":"storm_drpc",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/drpc.log"
-		},
-		{
-			"type":"storm_logviewer",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/logviewer.log"
-		},
-		{
-			"type":"storm_nimbus",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/nimbus.log"
-		},
-		{
-			"type":"storm_supervisor",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/supervisor.log"
-		},
-		{
-			"type":"storm_ui",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/ui.log"
-		},
-		{
-			"type":"storm_worker",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/*worker*.log"
-		},
-		{
-			"type":"zookeeper",
-			"rowtype":"service",
-			"path":"{{zk_log_dir}}/zookeeper/zookeeper*.out"
-		},
-		{
-			"type":"hdfs_audit",
-			"rowtype":"audit",
-			"is_enabled":"true",
-			"add_fields":{
-				"logType":"HDFSAudit",
-				"enforcer":"hadoop-acl",
-				"repoType":"1",
-				"repo":"hdfs"
-			},
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log"
-		}
-		
-	],
-	"filter":[
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"accumulo_master"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}:%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"comment":"This one has one extra space after LEVEL",
-			"conditions":{
-				"fields":{
-					"type":[
-						"accumulo_gc",
-						"accumulo_monitor",
-						"accumulo_tracer",
-						"accumulo_tserver"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} [%-8c{2}] %-5p: %X{application} %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{JAVACLASS:logger_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}:%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"atlas_app",
-						"falcon_app"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d %-5p - [%t:%x] ~ %m (%c{1}:%L)%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{SPACE}-%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}~%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ams_collector"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %p %c: %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ams_hbase_master",
-						"ams_hbase_regionserver",
-						"hbase_master",
-						"hbase_regionserver"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %-5p [%t] %c{2}: %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ambari_agent"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"",
-			"multiline_pattern":"^(%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{LOGLEVEL:level} %{TIMESTAMP_ISO8601:logtime} %{JAVAFILE:file}:%{INT:line_number} - %{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				},
-				"level":{
-					"map_fieldvalue":{
-						"pre_value":"WARNING",
-						"post_value":"WARN"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ambari_server"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{DATE} %5p [%t] %c{1}:%L - %m%n",
-			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{JAVACLASS:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"dd MMM yyyy HH:mm:ss"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"hdfs_datanode",
-						"hdfs_journalnode",
-						"hdfs_secondarynamenode",
-						"hdfs_namenode",
-						"hdfs_zkfc",
-						"knox_gateway",
-						"knox_cli",
-						"knox_ldap",
-						"mapred_historyserver",
-						"yarn_historyserver",
-						"yarn_jobsummary",
-						"yarn_nodemanager",
-						"yarn_resourcemanager",
-						"yarn_timelineserver"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"hive_hiveserver2",
-						"hive_metastore"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %-5p [%t]: %c{2} (%F:%M(%L)) - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]:%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{JAVAMETHOD:method}\\(%{INT:line_number}\\)\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"kafka_controller",
-						"kafka_request",
-						"kafka_logcleaner"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"[%d] %p %m (%c)%n",
-			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"comment":"Suppose to be same log4j pattern as other kafka processes, but some reason thread is not printed",
-			"conditions":{
-				"fields":{
-					"type":[
-						"kafka_server",
-						"kafka_statechange"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"[%d] %p %m (%c)%n",
-			"multiline_pattern":"^(\\[%{TIMESTAMP_ISO8601:logtime}\\])",
-			"message_pattern":"(?m)^\\[%{TIMESTAMP_ISO8601:logtime}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"oozie_app"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %5p %c{1}:%L - SERVER[${oozie.instance.id}] %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{DATA:logger_name}:%{INT:line_number}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-		"filter": "json",
-		"conditions": {
-			"fields": {
-				"type": [
-					"logsearch_app",
-					"logsearch_feeder",
-					"logsearch_perf"
-					]
-				}
-			}
- 		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ranger_admin",
-						"ranger_dbpatch"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d [%t] %-5p %C{6} (%F:%L) - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\(%{JAVAFILE:file}:%{INT:line_number}\\)%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ranger_kms"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %-5p %c{1} - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"ranger_usersync"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{dd MMM yyyy HH:mm:ss} %5p %c{1} [%t] - %m%n",
-			"multiline_pattern":"^(%{USER_SYNC_DATE:logtime})",
-			"message_pattern":"(?m)^%{USER_SYNC_DATE:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{DATA:thread_name}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"dd MMM yyyy HH:mm:ss"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"storm_drpc",
-						"storm_logviewer",
-						"storm_nimbus",
-						"storm_supervisor",
-						"storm_ui",
-						"storm_worker"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{JAVACLASS:logger_name}%{SPACE}\\[%{LOGLEVEL:level}\\]%{SPACE}%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss.SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"zookeeper"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} - %-5p [%t:%C{1}@%L] - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:logtime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}-%{SPACE}%{LOGLEVEL:level}%{SPACE}\\[%{DATA:thread_name}\\@%{INT:line_number}\\]%{SPACE}-%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"logtime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"conditions":{
-				"fields":{
-					"type":[
-						"hdfs_audit"
-					]
-					
-				}
-				
-			},
-			"log4j_format":"%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n",
-			"multiline_pattern":"^(%{TIMESTAMP_ISO8601:evtTime})",
-			"message_pattern":"(?m)^%{TIMESTAMP_ISO8601:evtTime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{JAVACLASS:logger_name}:%{SPACE}%{GREEDYDATA:log_message}",
-			"post_map_values":{
-				"evtTime":{
-					"map_date":{
-						"date_pattern":"yyyy-MM-dd HH:mm:ss,SSS"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"keyvalue",
-			"sort_order":1,
-			"conditions":{
-				"fields":{
-					"type":[
-						"hdfs_audit"
-					]
-					
-				}
-				
-			},
-			"source_field":"log_message",
-			"value_split":"=",
-			"field_split":"\t",
-			"post_map_values":{
-				"src":{
-					"map_fieldname":{
-						"new_fieldname":"resource"
-					}
-					
-				},
-				"ip":{
-					"map_fieldname":{
-						"new_fieldname":"cliIP"
-					}
-					
-				},
-				"allowed":[
-					{
-						"map_fieldvalue":{
-							"pre_value":"true",
-							"post_value":"1"
-						}
-						
-					},
-					{
-						"map_fieldvalue":{
-							"pre_value":"false",
-							"post_value":"0"
-						}
-						
-					},
-					{
-						"map_fieldname":{
-							"new_fieldname":"result"
-						}
-						
-					}
-					
-				],
-				"cmd":{
-					"map_fieldname":{
-						"new_fieldname":"action"
-					}
-					
-				},
-				"proto":{
-					"map_fieldname":{
-						"new_fieldname":"cliType"
-					}
-					
-				},
-				"callerContext":{
-					"map_fieldname":{
-						"new_fieldname":"req_caller_id"
-					}
-					
-				}
-				
-			}
-			
-		},
-		{
-			"filter":"grok",
-			"sort_order":2,
-			"source_field":"ugi",
-			"remove_source_field":"false",
-			"conditions":{
-				"fields":{
-					"type":[
-						"hdfs_audit"
-					]
-					
-				}
-				
-			},
-			"message_pattern":"%{USERNAME:p_user}.+auth:%{USERNAME:p_authType}.+via %{USERNAME:k_user}.+auth:%{USERNAME:k_authType}|%{USERNAME:user}.+auth:%{USERNAME:authType}|%{USERNAME:x_user}",
-			"post_map_values":{
-				"user":{
-					"map_fieldname":{
-						"new_fieldname":"reqUser"
-					}
-					
-				},
-				"x_user":{
-					"map_fieldname":{
-						"new_fieldname":"reqUser"
-					}
-					
-				},
-				"p_user":{
-					"map_fieldname":{
-						"new_fieldname":"reqUser"
-					}
-					
-				},
-				"k_user":{
-					"map_fieldname":{
-						"new_fieldname":"proxyUsers"
-					}
-					
-				},
-				"p_authType":{
-					"map_fieldname":{
-						"new_fieldname":"authType"
-					}
-					
-				},
-				"k_authType":{
-					"map_fieldname":{
-						"new_fieldname":"proxyAuthType"
-					}
-					
-				}
-				
-			}
-			
-		}
-		
-	],
-	"output":[
-		{
-			"is_enabled":"{{solr_service_logs_enable}}",
-			"comment":"Output to solr for service logs",
-			"destination":"solr",
-			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
-			"collection":"{{logsearch_solr_collection_service_logs}}",
-			"number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
-			"splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"service"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"comment":"Output to solr for audit records",
-			"is_enabled":"{{solr_audit_logs_enable}}",
-			"destination":"solr",
-			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
-			"collection":"{{logsearch_solr_collection_audit_logs}}",
-			"number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
-			"splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"audit"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"is_enabled":"{{kafka_service_logs_enable}}",
-			"destination":"kafka",
-			"broker_list":"{{kafka_broker_list}}",
-			"topic":"{{kafka_topic_service_logs}}",
-			"kafka.security.protocol":"{{kafka_security_protocol}}",
-			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"service"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"is_enabled":"{{kafka_audit_logs_enable}}",
-			"destination":"kafka",
-			"broker_list":"{{kafka_broker_list}}",
-			"topic":"{{kafka_topic_audit_logs}}",
-			"kafka.security.protocol":"{{kafka_security_protocol}}",
-			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"audit"
-					]
-					
-				}
-				
-			}
-			
-		}
-		
-	]
-	
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2
deleted file mode 100644
index cd51118..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/global.config.json.j2
+++ /dev/null
@@ -1,28 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-	"global":{
-		"add_fields":{
-			"cluster":"{{cluster_name}}"
-		},
-		"source":"file",
-		"tail":"true",
-		"gen_event_md5":"true",
-		"start_position":"beginning"
-	}	
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2
deleted file mode 100644
index bc48503..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/input.config.json.j2
+++ /dev/null
@@ -1,284 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-	"input":[
-		{
-			"type":"accumulo_gc",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/gc_*.log"
-		},
-		{
-			"type":"accumulo_master",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/master_*.log"
-		},
-		{
-			"type":"accumulo_monitor",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/monitor_*.log"
-		},
-		{
-			"type":"accumulo_tracer",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/tracer_*.log"
-		},
-		{
-			"type":"accumulo_tserver",
-			"rowtype":"service",
-			"path":"{{accumulo_log_dir}}/tserver_*.log"
-		},
-		{
-			"type":"atlas_app",
-			"rowtype":"service",
-			"path":"{{atlas_log_dir}}/application.log"
-		},
-		{
-			"type":"ambari_agent",
-			"rowtype":"service",
-			"path":"{{ambari_agent_log_dir}}/ambari-agent.log"
-		},
-		{
-			"type":"ambari_server",
-			"rowtype":"service",
-			"path":"{{ambari_server_log_dir}}/ambari-server.log"
-		},
-		{
-			"type":"ams_hbase_master",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/hbase-ams-master-*.log"
-		},
-		{
-			"type":"ams_hbase_regionserver",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/hbase-ams-regionserver-*.log"
-		},
-		{
-			"type":"ams_collector",
-			"rowtype":"service",
-			"path":"{{metrics_collector_log_dir}}/ambari-metrics-collector.log"
-		},
-		{
-			"type":"falcon_app",
-			"rowtype":"service",
-			"path":"{{falcon_log_dir}}/falcon.application.log"
-		},
-		{
-			"type":"hbase_master",
-			"rowtype":"service",
-			"path":"{{hbase_log_dir}}/hbase-hbase-master-*.log"
-		},
-		{
-			"type":"hbase_regionserver",
-			"rowtype":"service",
-			"path":"{{hbase_log_dir}}/hbase-hbase-regionserver-*.log"
-		},
-		{
-			"type":"hdfs_datanode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-datanode-*.log"
-		},
-		{
-			"type":"hdfs_namenode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-namenode-*.log"
-		},
-		{
-			"type":"hdfs_journalnode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-journalnode-*.log"
-		},
-		{
-			"type":"hdfs_secondarynamenode",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-secondarynamenode-*.log"
-		},
-		{
-			"type":"hdfs_zkfc",
-			"rowtype":"service",
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hadoop-hdfs-zkfc-*.log"
-		},
-		{
-			"type":"hive_hiveserver2",
-			"rowtype":"service",
-			"path":"{{hive_log_dir}}/hiveserver2.log"
-		},
-		{
-			"type":"hive_metastore",
-			"rowtype":"service",
-			"path":"{{hive_log_dir}}/hivemetastore.log"
-		},
-		{
-			"type":"kafka_controller",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/controller.log"
-		},
-		{
-			"type":"kafka_request",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/kafka-request.log"
-		},
-		{
-			"type":"kafka_logcleaner",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/log-cleaner.log"
-		},
-		{
-			"type":"kafka_server",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/server.log"
-		},
-		{
-			"type":"kafka_statechange",
-			"rowtype":"service",
-			"path":"{{kafka_log_dir}}/state-change.log"
-		},
-		{
-			"type":"knox_gateway",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/gateway.log"
-		},
-		{
-			"type":"knox_cli",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/knoxcli.log"
-		},
-		{
-			"type":"knox_ldap",
-			"rowtype":"service",
-			"path":"{{knox_log_dir}}/ldap.log"
-		},
-		{
-			"type":"mapred_historyserver",
-			"rowtype":"service",
-			"path":"{{mapred_log_dir_prefix}}/mapred/mapred-mapred-historyserver*.log"
-		},
-		{
-			"type":"logsearch_app",
-			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch.log"
-		},
-		{
-			"type":"logsearch_feeder",
-			"rowtype":"service",
-			"path":"{{logfeeder_log_dir}}/logfeeder.log"
-		},
-		{
-			"type":"logsearch_perf",
-			"rowtype":"service",
-			"path":"{{logsearch_log_dir}}/logsearch-performance.log"
-		},
-		{
-			"type":"ranger_admin",
-			"rowtype":"service",
-			"path":"{{ranger_admin_log_dir}}/xa_portal.log"
-		},
-		{
-			"type":"ranger_dbpatch",
-			"is_enabled":"true",
-			"path":"{{ranger_admin_log_dir}}/ranger_db_patch.log"
-		},
-		{
-			"type":"ranger_kms",
-			"rowtype":"service",
-			"path":"{{ranger_kms_log_dir}}/kms.log"
-		},
-		{
-			"type":"ranger_usersync",
-			"rowtype":"service",
-			"path":"{{ranger_usersync_log_dir}}/usersync.log"
-		},
-		{
-			"type":"oozie_app",
-			"rowtype":"service",
-			"path":"{{oozie_log_dir}}/oozie.log"
-		},
-		{
-			"type":"yarn_nodemanager",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-nodemanager-*.log"
-		},
-		{
-			"type":"yarn_resourcemanager",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-resourcemanager-*.log"
-		},
-		{
-			"type":"yarn_timelineserver",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-timelineserver-*.log"
-		},
-		{
-			"type":"yarn_historyserver",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/yarn-yarn-historyserver-*.log"
-		},
-		{
-			"type":"yarn_jobsummary",
-			"rowtype":"service",
-			"path":"{{yarn_log_dir_prefix}}/yarn/hadoop-mapreduce.jobsummary.log"
-		},
-		{
-			"type":"storm_drpc",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/drpc.log"
-		},
-		{
-			"type":"storm_logviewer",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/logviewer.log"
-		},
-		{
-			"type":"storm_nimbus",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/nimbus.log"
-		},
-		{
-			"type":"storm_supervisor",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/supervisor.log"
-		},
-		{
-			"type":"storm_ui",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/ui.log"
-		},
-		{
-			"type":"storm_worker",
-			"rowtype":"service",
-			"path":"{{storm_log_dir}}/*worker*.log"
-		},
-		{
-			"type":"zookeeper",
-			"rowtype":"service",
-			"path":"{{zk_log_dir}}/zookeeper/zookeeper*.out"
-		},
-		{
-			"type":"hdfs_audit",
-			"rowtype":"audit",
-			"is_enabled":"true",
-			"add_fields":{
-				"logType":"HDFSAudit",
-				"enforcer":"hadoop-acl",
-				"repoType":"1",
-				"repo":"hdfs"
-			},
-			"path":"{{hdfs_log_dir_prefix}}/hdfs/hdfs-audit.log"
-		}
-		
-	]	
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2
deleted file mode 100644
index 4338ee3..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/log4j.xml.j2
+++ /dev/null
@@ -1,60 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
-
-<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
-  <appender name="console" class="org.apache.log4j.ConsoleAppender">
-    <param name="Target" value="System.out" />
-    <layout class="org.apache.log4j.PatternLayout">
-      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n" />
-      <!-- <param name="ConversionPattern" value="%d [%t] %-5p %c %x - %m%n"/> -->
-    </layout>
-  </appender>
-
-  <appender name="rolling_file" class="org.apache.log4j.RollingFileAppender">
-    <param name="file" value="{{logfeeder_log_dir}}/logfeeder.log" />
-    <param name="append" value="true" />
-    <layout class="org.apache.log4j.PatternLayout">
-      <param name="ConversionPattern" value="%d [%t] %-5p %C{6} (%F:%L) - %m%n"/>
-    </layout>
-  </appender>
-
-  <!-- Logs to suppress BEGIN -->
-  <category name="org.apache.solr.common.cloud.ZkStateReader" additivity="false">
-    <priority value="error" />
-    <appender-ref ref="rolling_file" />
-  </category>
-
-  <category name="apache.solr.client.solrj.impl.CloudSolrClient" additivity="false">
-    <priority value="fatal" />
-    <appender-ref ref="rolling_file" />
-  </category>
-
-  <!-- Logs to suppress END -->
-
-  <category name="org.apache.ambari.logfeeder" additivity="false">
-    <priority value="{{logfeeder_log_level}}" />
-    <appender-ref ref="rolling_file" />
-  </category>
-
-  <root>
-    <level value="warn" />
-    <!-- <appender-ref ref="console" /> -->
-    <appender-ref ref="rolling_file" />
-  </root>
-</log4j:configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
deleted file mode 100644
index 63c590e..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/resources/output.config.json.j2
+++ /dev/null
@@ -1,97 +0,0 @@
-{#
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements.  See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership.  The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License.  You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- #}
-{
-	"output":[
-		{
-			"is_enabled":"{{solr_service_logs_enable}}",
-			"comment":"Output to solr for service logs",
-			"destination":"solr",
-			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
-			"collection":"{{logsearch_solr_collection_service_logs}}",
-			"number_of_shards": "{{logsearch_collection_service_logs_numshards}}",
-			"splits_interval_mins": "{{logsearch_service_logs_split_interval_mins}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"service"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"comment":"Output to solr for audit records",
-			"is_enabled":"{{solr_audit_logs_enable}}",
-			"destination":"solr",
-			"zk_connect_string":"{{zookeeper_quorum}}{{logsearch_solr_znode}}",
-			"collection":"{{logsearch_solr_collection_audit_logs}}",
-			"number_of_shards": "{{logsearch_collection_audit_logs_numshards}}",
-			"splits_interval_mins": "{{logsearch_audit_logs_split_interval_mins}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"audit"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"is_enabled":"{{kafka_service_logs_enable}}",
-			"destination":"kafka",
-			"broker_list":"{{kafka_broker_list}}",
-			"topic":"{{kafka_topic_service_logs}}",
-			"kafka.security.protocol":"{{kafka_security_protocol}}",
-			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"service"
-					]
-					
-				}
-				
-			}
-			
-		},
-		{
-			"is_enabled":"{{kafka_audit_logs_enable}}",
-			"destination":"kafka",
-			"broker_list":"{{kafka_broker_list}}",
-			"topic":"{{kafka_topic_audit_logs}}",
-			"kafka.security.protocol":"{{kafka_security_protocol}}",
-			"kafka.sasl.kerberos.service.name":"{{kafka_kerberos_service_name}}",
-			"conditions":{
-				"fields":{
-					"rowtype":[
-						"audit"
-					]
-					
-				}
-				
-			}
-			
-		}
-		
-	]
-	
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/0e73da6e/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh.j2
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh.j2 b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh.j2
deleted file mode 100644
index 713a73a..0000000
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/scripts/run.sh.j2
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-curr_dir=`pwd`
-cd `dirname $0`; script_dir=`pwd`; cd $curr_dir
-
-foreground=0
-if [ "$1" = "-foreground" ]; then
-    foreground=1
-    shift
-fi
-
-JAVA=java
-if [ -x $JAVA_HOME/bin/java ]; then
-    JAVA=$JAVA_HOME/bin/java
-fi
-
-if [ "$LOGFEEDER_JAVA_MEM" = "" ]; then
-    LOGFEEDER_JAVA_MEM="-Xmx512m"
-fi
-
-if [ "$LOGFILE" = "" ]; then
-    LOGFILE="{{logfeeder_log_dir}}/logfeeder.out"
-fi
-
-if [ "$PID_FILE" = "" ]; then
-    LOGFEEDER_PID_DIR=$HOME
-    PID_FILE=$LOGFEEDER_PID_DIR/logsearch-logfeeder-$USER.pid
-fi
-
-if [ "$LOGFEEDER_CONF_DIR" = "" ]; then
-    LOGFEEDER_CONF_DIR="/etc/logfeeder/conf"
-fi
-
-LOGFEEDER_GC_LOGFILE=`dirname $LOGFILE`/logfeeder_gc.log
-LOGFEEDER_GC_OPTS="-XX:+PrintGCDetails -XX:+PrintGCDateStamps -Xloggc:$LOGFEEDER_GC_LOGFILE"
-
-#LOGFEEDER_JAVA_OPTS=
-#JMX="-Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.port=2098"
-
-if [ $foreground -eq 0 ]; then
-    if [ -f ${PID_FILE} ]; then
-	PID=`cat ${PID_FILE}`
-	if kill -0 $PID 2>/dev/null; then
-	    echo "logfeeder already running (${PID}) killing..."
-	    kill $PID 2>/dev/null
-	    sleep 5
-	    if kill -0 $PID 2>/dev/null; then
-		echo "logfeeder still running. Will kill process forcefully in another 10 seconds..."
-		sleep 10
-		kill -9 $PID 2>/dev/null
-		sleep 2
-	    fi
-	fi
-
-	if kill -0 $PID 2>/dev/null; then
-	    echo "ERROR: Even after all efforts to stop logfeeder, it is still running. pid=$PID. Please manually kill the service and try again."
-	    exit 1
-	fi
-    fi
-
-    echo "Starting logfeeder. Output file=$LOGFILE pid_file=$PID_FILE"
-    #LOGFEEDER_CLI_CLASSPATH=
-    #set -x
-    nohup $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:/etc/logfeeder/conf:$script_dir/libs/*:$script_dir/classes:$script_dir/LogProcessor.jar" $LOGFEEDER_GC_OPTS $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $* > $LOGFILE 2>&1 &
-    echo $! > $PID_FILE
-else
-    $JAVA -cp "$LOGFEEDER_CLI_CLASSPATH:$LOGFEEDER_CONF_DIR:$script_dir/libs/*:$script_dir/classes:$script_dir/LogProcessor.jar" $LOGFEEDER_JAVA_MEM $LOGFEEDER_JAVA_OPTS $JMX org.apache.ambari.logfeeder.LogFeeder $*
-fi
-


Mime
View raw message