hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From a...@apache.org
Subject hadoop git commit: HDDS-566. Move OzoneSecure docker-compose after HDDS-447. Contributed by Xiaoyu Yao.
Date Tue, 02 Oct 2018 17:07:54 GMT
Repository: hadoop
Updated Branches:
  refs/heads/HDDS-4 9363d8fc8 -> fed478a93


HDDS-566. Move OzoneSecure docker-compose after HDDS-447. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fed478a9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fed478a9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fed478a9

Branch: refs/heads/HDDS-4
Commit: fed478a9389dd05a8b21ae50b9669c580205c729
Parents: 9363d8f
Author: Ajay Kumar <ajay@apache.com>
Authored: Tue Oct 2 10:07:35 2018 -0700
Committer: Ajay Kumar <ajay@apache.com>
Committed: Tue Oct 2 10:07:35 2018 -0700

----------------------------------------------------------------------
 hadoop-dist/src/main/compose/ozonesecure/.env   |  18 ---
 .../compose/ozonesecure/docker-compose.yaml     |  57 -------
 .../src/main/compose/ozonesecure/docker-config  | 103 -------------
 .../ozonesecure/docker-image/runner/Dockerfile  |  39 -----
 .../ozonesecure/docker-image/runner/build.sh    |  26 ----
 .../docker-image/runner/scripts/envtoconf.py    | 115 --------------
 .../docker-image/runner/scripts/krb5.conf       |  38 -----
 .../docker-image/runner/scripts/starter.sh      | 100 -------------
 .../runner/scripts/transformation.py            | 150 -------------------
 .../dist/src/main/compose/ozonesecure/.env      |  18 +++
 .../compose/ozonesecure/docker-compose.yaml     |  57 +++++++
 .../src/main/compose/ozonesecure/docker-config  | 103 +++++++++++++
 .../ozonesecure/docker-image/runner/Dockerfile  |  39 +++++
 .../ozonesecure/docker-image/runner/build.sh    |  26 ++++
 .../docker-image/runner/scripts/envtoconf.py    | 115 ++++++++++++++
 .../docker-image/runner/scripts/krb5.conf       |  38 +++++
 .../docker-image/runner/scripts/starter.sh      | 100 +++++++++++++
 .../runner/scripts/transformation.py            | 150 +++++++++++++++++++
 18 files changed, 646 insertions(+), 646 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/.env
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/.env b/hadoop-dist/src/main/compose/ozonesecure/.env
deleted file mode 100644
index a494004..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/.env
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-HDDS_VERSION=${hdds.version}
-SRC_VOLUME=../../

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml b/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
deleted file mode 100644
index 42ab05e..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-compose.yaml
+++ /dev/null
@@ -1,57 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3"
-services:
-   kdc:
-      image: ahadoop/kdc:v1
-      hostname: kdc
-      volumes:
-          - $SRC_VOLUME:/opt/hadoop
-   datanode:
-      image: ahadoop/runner:latest
-      volumes:
-        - $SRC_VOLUME:/opt/hadoop
-      hostname: datanode
-      ports:
-        - 9864
-      command: ["/opt/hadoop/bin/ozone","datanode"]
-      env_file:
-        - ./docker-config
-   ozoneManager:
-      image: ahadoop/runner:latest
-      hostname: om
-      volumes:
-         - $SRC_VOLUME:/opt/hadoop
-      ports:
-         - 9874:9874
-      environment:
-         ENSURE_OM_INITIALIZED: /data/metadata/ozoneManager/current/VERSION
-      env_file:
-          - ./docker-config
-      command: ["/opt/hadoop/bin/ozone","om"]
-   scm:
-      image: ahadoop/runner:latest
-      hostname: scm
-      volumes:
-         - $SRC_VOLUME:/opt/hadoop
-      ports:
-         - 9876:9876
-      env_file:
-          - ./docker-config
-      environment:
-          ENSURE_SCM_INITIALIZED: /data/metadata/scm/current/VERSION
-      command: ["/opt/hadoop/bin/ozone","scm"]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-config
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-config b/hadoop-dist/src/main/compose/ozonesecure/docker-config
deleted file mode 100644
index 704dc7b..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-config
+++ /dev/null
@@ -1,103 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-OZONE-SITE.XML_ozone.om.address=om
-OZONE-SITE.XML_ozone.om.http-address=om:9874
-OZONE-SITE.XML_ozone.scm.names=scm
-OZONE-SITE.XML_ozone.enabled=True
-OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
-OZONE-SITE.XML_ozone.scm.block.client.address=scm
-OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
-OZONE-SITE.XML_ozone.handler.type=distributed
-OZONE-SITE.XML_ozone.scm.client.address=scm
-OZONE-SITE.XML_ozone.replication=1
-OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
-OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
-OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
-OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
-OZONE-SITE.XML_ozone.security.enabled=true
-OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/scm@EXAMPLE.COM
-OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/om@EXAMPLE.COM
-OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/_HOST@EXAMPLE.COM
-HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
-HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
-HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
-HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
-HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012
-CORE-SITE.XML_dfs.data.transfer.protection=authentication
-CORE-SITE.XML_hadoop.security.authentication=kerberos
-CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
-HDFS-SITE.XML_rpc.metrics.quantile.enable=true
-HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
-LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
-LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
-LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
-LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
-LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
-
-#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
-#BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm
-
-#LOG4J2.PROPERTIES_* are for Ozone Audit Logging
-LOG4J2.PROPERTIES_monitorInterval=30
-LOG4J2.PROPERTIES_filter=read,write
-LOG4J2.PROPERTIES_filter.read.type=MarkerFilter
-LOG4J2.PROPERTIES_filter.read.marker=READ
-LOG4J2.PROPERTIES_filter.read.onMatch=DENY
-LOG4J2.PROPERTIES_filter.read.onMismatch=NEUTRAL
-LOG4J2.PROPERTIES_filter.write.type=MarkerFilter
-LOG4J2.PROPERTIES_filter.write.marker=WRITE
-LOG4J2.PROPERTIES_filter.write.onMatch=NEUTRAL
-LOG4J2.PROPERTIES_filter.write.onMismatch=NEUTRAL
-LOG4J2.PROPERTIES_appenders=console, rolling
-LOG4J2.PROPERTIES_appender.console.type=Console
-LOG4J2.PROPERTIES_appender.console.name=STDOUT
-LOG4J2.PROPERTIES_appender.console.layout.type=PatternLayout
-LOG4J2.PROPERTIES_appender.console.layout.pattern=%d{DEFAULT} | %-5level | %c{1} | %msg | %throwable{3} %n
-LOG4J2.PROPERTIES_appender.rolling.type=RollingFile
-LOG4J2.PROPERTIES_appender.rolling.name=RollingFile
-LOG4J2.PROPERTIES_appender.rolling.fileName =${sys:hadoop.log.dir}/om-audit-${hostName}.log
-LOG4J2.PROPERTIES_appender.rolling.filePattern=${sys:hadoop.log.dir}/om-audit-${hostName}-%d{yyyy-MM-dd-HH-mm-ss}-%i.log.gz
-LOG4J2.PROPERTIES_appender.rolling.layout.type=PatternLayout
-LOG4J2.PROPERTIES_appender.rolling.layout.pattern=%d{DEFAULT} | %-5level | %c{1} | %msg | %throwable{3} %n
-LOG4J2.PROPERTIES_appender.rolling.policies.type=Policies
-LOG4J2.PROPERTIES_appender.rolling.policies.time.type=TimeBasedTriggeringPolicy
-LOG4J2.PROPERTIES_appender.rolling.policies.time.interval=86400
-LOG4J2.PROPERTIES_appender.rolling.policies.size.type=SizeBasedTriggeringPolicy
-LOG4J2.PROPERTIES_appender.rolling.policies.size.size=64MB
-LOG4J2.PROPERTIES_loggers=audit
-LOG4J2.PROPERTIES_logger.audit.type=AsyncLogger
-LOG4J2.PROPERTIES_logger.audit.name=OMAudit
-LOG4J2.PROPERTIES_logger.audit.level=INFO
-LOG4J2.PROPERTIES_logger.audit.appenderRefs=rolling
-LOG4J2.PROPERTIES_logger.audit.appenderRef.file.ref=RollingFile
-LOG4J2.PROPERTIES_rootLogger.level=INFO
-LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
-LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
-
-OZONE_DATANODE_SECURE_USER=root
-CONF_DIR=/etc/security/keytabs
-KERBEROS_KEYTABS=dn om scm HTTP testuser
-KERBEROS_KEYSTORES=hadoop
-KERBEROS_SERVER=kdc
-JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
-JSVC_HOME=/usr/bin
-SLEEP_SECONDS=5
-KERBEROS_ENABLED=true

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
deleted file mode 100644
index efda03e..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License lsfor the specific language governing permissions and
-# limitations under the License.
-
-FROM openjdk:8-jdk
-RUN apt-get update && apt-get install -y  jq curl python sudo && apt-get clean
-
-RUN wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64
-RUN chmod +x /usr/local/bin/dumb-init
-RUN mkdir -p /etc/security/keytabs && chmod -R a+wr /etc/security/keytabs
-ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
-ENV PATH $PATH:/opt/hadoop/bin
-
-RUN apt-get install -y jsvc
-ENV JSVC_HOME=/usr/bin
-ADD scripts/krb5.conf /etc/
-RUN apt-get install -y krb5-user
-
-RUN addgroup --gid 1000 hadoop
-RUN adduser --disabled-password --gecos "" --uid 1000 hadoop --gid 1000 --home /opt/hadoop
-RUN echo "hadoop ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
-ADD scripts /opt/
-
-WORKDIR /opt/hadoop
-
-VOLUME /data
-ENTRYPOINT ["/usr/local/bin/dumb-init", "--", "/opt/starter.sh"]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
deleted file mode 100755
index abbdebe..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-set -e
-mkdir -p build
-if [ ! -d "$DIR/build/apache-rat-0.12" ]; then
-  wget "http://xenia.sote.hu/ftp/mirrors/www.apache.org/creadur/apache-rat-0.12/apache-rat-0.12-bin.tar.gz -O $DIR/build/apache-rat.tar.gz"
-  cd "$DIR"/build || exit
-  tar zvxf apache-rat.tar.gz
-fi
-java -jar "$DIR"/build/apache-rat-0.12/apache-rat-0.12.jar "$DIR" -e public -e apache-rat-0.12 -e .git -e .gitignore
-docker build -t ahadoop/runner .

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
deleted file mode 100755
index ad2e176..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/python
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""convert environment variables to config"""
-
-import os
-import re
-
-import argparse
-
-import sys
-import transformation
-
-class Simple(object):
-  """Simple conversion"""
-  def __init__(self, args):
-    parser = argparse.ArgumentParser()
-    parser.add_argument("--destination", help="Destination directory", required=True)
-    self.args = parser.parse_args(args=args)
-    # copy the default files to file.raw in destination directory
-
-    self.known_formats = ['xml', 'properties', 'yaml', 'yml', 'env', "sh", "cfg", 'conf']
-    self.output_dir = self.args.destination
-
-    self.configurables = {}
-
-  def destination_file_path(self, name, extension):
-    """destination file path"""
-    return os.path.join(self.output_dir, "{}.{}".format(name, extension))
-
-  def write_env_var(self, name, extension, key, value):
-    """Write environment variables"""
-    with open(self.destination_file_path(name, extension) + ".raw", "a") as myfile:
-      myfile.write("{}: {}\n".format(key, value))
-
-  def process_envs(self):
-    """Process environment variables"""
-    for key in os.environ.keys():
-      pattern = re.compile("[_\\.]")
-      parts = pattern.split(key)
-      extension = None
-      name = parts[0].lower()
-      if len(parts) > 1:
-        extension = parts[1].lower()
-        config_key = key[len(name) + len(extension) + 2:].strip()
-      if extension and "!" in extension:
-        splitted = extension.split("!")
-        extension = splitted[0]
-        fmt = splitted[1]
-        config_key = key[len(name) + len(extension) + len(fmt) + 3:].strip()
-      else:
-        fmt = extension
-
-      if extension and extension in self.known_formats:
-        if name not in self.configurables.keys():
-          with open(self.destination_file_path(name, extension) + ".raw", "w") as myfile:
-            myfile.write("")
-        self.configurables[name] = (extension, fmt)
-        self.write_env_var(name, extension, config_key, os.environ[key])
-      else:
-        for configurable_name in self.configurables:
-          if key.lower().startswith(configurable_name.lower()):
-            self.write_env_var(configurable_name,
-                               self.configurables[configurable_name],
-                               key[len(configurable_name) + 1:],
-                               os.environ[key])
-
-  def transform(self):
-    """transform"""
-    for configurable_name in self.configurables:
-      name = configurable_name
-      extension, fmt = self.configurables[name]
-
-      destination_path = self.destination_file_path(name, extension)
-
-      with open(destination_path + ".raw", "r") as myfile:
-        content = myfile.read()
-        transformer_func = getattr(transformation, "to_" + fmt)
-        content = transformer_func(content)
-        with open(destination_path, "w") as myfile:
-          myfile.write(content)
-
-  def main(self):
-    """main"""
-
-    # add the
-    self.process_envs()
-
-    # copy file.ext.raw to file.ext in the destination directory, and
-    # transform to the right format (eg. key: value ===> XML)
-    self.transform()
-
-
-def main():
-  """main"""
-  Simple(sys.argv[1:]).main()
-
-
-if __name__ == '__main__':
-  Simple(sys.argv[1:]).main()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
deleted file mode 100644
index 82ae73a..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-[logging]
- default = FILE:/var/log/krb5libs.log
- kdc = FILE:/var/log/krb5kdc.log
- admin_server = FILE:/var/log/kadmind.log
-
-[libdefaults]
- dns_canonicalize_hostname = false
- dns_lookup_realm = false
- ticket_lifetime = 24h
- renew_lifetime = 7d
- forwardable = true
- rdns = false
- default_realm = EXAMPLE.COM
-
-[realms]
- EXAMPLE.COM = {
-  kdc = SERVER
-  admin_server = SERVER
- }
-
-[domain_realm]
- .example.com = EXAMPLE.COM
- example.com = EXAMPLE.COM
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
deleted file mode 100755
index 04cd49d..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
+++ /dev/null
@@ -1,100 +0,0 @@
-#!/usr/bin/env bash
-##
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##
-set -e
-
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-echo "Setting up enviorment!!"
-
-if [ -n "$KERBEROS_ENABLED" ]; then
-  echo "Setting up kerberos!!"
-  KERBEROS_SERVER=${KERBEROS_SERVER:-krb5}
-  ISSUER_SERVER=${ISSUER_SERVER:-$KERBEROS_SERVER\:8081}
-
-  echo "KDC ISSUER_SERVER => $ISSUER_SERVER"
-
-  if [ -n "$SLEEP_SECONDS" ]; then
-    echo "Sleeping for $(SLEEP_SECONDS) seconds"
-    sleep "$SLEEP_SECONDS"
-  fi
-
-
-  while true
-    do
-      STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://"$ISSUER_SERVER"/keytab/test/test)
-      if [ "$STATUS" -eq 200 ]; then
-        echo "Got 200, KDC service ready!!"
-        break
-      else
-        echo "Got $STATUS :( KDC service not ready yet..."
-      fi
-      sleep 5
-    done
-
-    HOST_NAME=$(hostname -f)
-    export HOST_NAME
-    for NAME in ${KERBEROS_KEYTABS}; do
-      echo "Download $NAME/$HOSTNAME@EXAMPLE.COM keytab file to $CONF_DIR/$NAME.keytab"
-      wget "http://$ISSUER_SERVER/keytab/$HOST_NAME/$NAME" -O "$CONF_DIR/$NAME.keytab"
-      klist -kt "$CONF_DIR/$NAME.keytab"
-      KERBEROS_ENABLED=true
-    done
-
-    sed "s/SERVER/$KERBEROS_SERVER/g" "$DIR"/krb5.conf | sudo tee /etc/krb5.conf
-fi
-
-#To avoid docker volume permission problems
-sudo chmod o+rwx /data
-
-"$DIR"/envtoconf.py --destination /opt/hadoop/etc/hadoop
-
-if [ -n "$ENSURE_NAMENODE_DIR" ]; then
-  CLUSTERID_OPTS=""
-  if [ -n "$ENSURE_NAMENODE_CLUSTERID" ]; then
-    CLUSTERID_OPTS="-clusterid $ENSURE_NAMENODE_CLUSTERID"
-  fi
-  if [ ! -d "$ENSURE_NAMENODE_DIR" ]; then
-    /opt/hadoop/bin/hdfs namenode -format -force "$CLUSTERID_OPTS"
-  fi
-fi
-
-if [ -n "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
-  if [ ! -d "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
-    /opt/hadoop/bin/hdfs namenode -bootstrapStandby
-  fi
-fi
-
-if [ -n "$ENSURE_SCM_INITIALIZED" ]; then
-  if [ ! -f "$ENSURE_SCM_INITIALIZED" ]; then
-    /opt/hadoop/bin/ozone scm -init
-  fi
-fi
-
-if [ -n "$ENSURE_OM_INITIALIZED" ]; then
-  if [ ! -f "$ENSURE_OM_INITIALIZED" ]; then
-    #To make sure SCM is running in dockerized environment we will sleep
-    # Could be removed after HDFS-13203
-    echo "Waiting 15 seconds for SCM startup"
-    sleep 15
-    /opt/hadoop/bin/ozone om -createObjectStore
-  fi
-fi
-
-echo 'setup finished'
-"$@"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
----------------------------------------------------------------------
diff --git a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py b/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
deleted file mode 100755
index 5e708ce..0000000
--- a/hadoop-dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#!/usr/bin/python
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""This module transform properties into different format"""
-def render_yaml(yaml_root, prefix=""):
-  """render yaml"""
-  result = ""
-  if isinstance(yaml_root, dict):
-    if prefix:
-      result += "\n"
-      for key in yaml_root:
-        result += "{}{}: {}".format(prefix, key, render_yaml(
-            yaml_root[key], prefix + "   "))
-  elif isinstance(yaml_root, list):
-    result += "\n"
-    for item in yaml_root:
-      result += prefix + " - " + render_yaml(item, prefix + " ")
-  else:
-    result += "{}\n".format(yaml_root)
-  return result
-
-
-def to_yaml(content):
-  """transform to yaml"""
-  props = process_properties(content)
-
-  keys = props.keys()
-  yaml_props = {}
-  for key in keys:
-    parts = key.split(".")
-    node = yaml_props
-    prev_part = None
-    parent_node = {}
-    for part in parts[:-1]:
-      if part.isdigit():
-        if isinstance(node, dict):
-          parent_node[prev_part] = []
-          node = parent_node[prev_part]
-        while len(node) <= int(part):
-          node.append({})
-        parent_node = node
-        node = node[int(node)]
-      else:
-        if part not in node:
-          node[part] = {}
-        parent_node = node
-        node = node[part]
-      prev_part = part
-    if parts[-1].isdigit():
-      if isinstance(node, dict):
-        parent_node[prev_part] = []
-        node = parent_node[prev_part]
-      node.append(props[key])
-    else:
-      node[parts[-1]] = props[key]
-
-  return render_yaml(yaml_props)
-
-
-def to_yml(content):
-  """transform to yml"""
-  return to_yaml(content)
-
-
-def to_properties(content):
-  """transform to properties"""
-  result = ""
-  props = process_properties(content)
-  for key, val in props.items():
-    result += "{}: {}\n".format(key, val)
-  return result
-
-
-def to_env(content):
-  """transform to environment variables"""
-  result = ""
-  props = process_properties(content)
-  for key, val in props:
-    result += "{}={}\n".format(key, val)
-  return result
-
-
-def to_sh(content):
-  """transform to shell"""
-  result = ""
-  props = process_properties(content)
-  for key, val in props:
-    result += "export {}=\"{}\"\n".format(key, val)
-  return result
-
-
-def to_cfg(content):
-  """transform to config"""
-  result = ""
-  props = process_properties(content)
-  for key, val in props:
-    result += "{}={}\n".format(key, val)
-  return result
-
-
-def to_conf(content):
-  """transform to configuration"""
-  result = ""
-  props = process_properties(content)
-  for key, val in props:
-    result += "export {}={}\n".format(key, val)
-  return result
-
-
-def to_xml(content):
-  """transform to xml"""
-  result = "<configuration>\n"
-  props = process_properties(content)
-  for key in props:
-    result += "<property><name>{0}</name><value>{1}</value></property>\n". \
-      format(key, props[key])
-  result += "</configuration>"
-  return result
-
-
-def process_properties(content, sep=': ', comment_char='#'):
-  """
-  Read the file passed as parameter as a properties file.
-  """
-  props = {}
-  for line in content.split("\n"):
-    sline = line.strip()
-    if sline and not sline.startswith(comment_char):
-      key_value = sline.split(sep)
-      key = key_value[0].strip()
-      value = sep.join(key_value[1:]).strip().strip('"')
-      props[key] = value
-
-  return props

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/.env
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/.env b/hadoop-ozone/dist/src/main/compose/ozonesecure/.env
new file mode 100644
index 0000000..a494004
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/.env
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+HDDS_VERSION=${hdds.version}
+SRC_VOLUME=../../

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
new file mode 100644
index 0000000..42ab05e
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-compose.yaml
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: "3"
+services:
+   kdc:
+      image: ahadoop/kdc:v1
+      hostname: kdc
+      volumes:
+          - $SRC_VOLUME:/opt/hadoop
+   datanode:
+      image: ahadoop/runner:latest
+      volumes:
+        - $SRC_VOLUME:/opt/hadoop
+      hostname: datanode
+      ports:
+        - 9864
+      command: ["/opt/hadoop/bin/ozone","datanode"]
+      env_file:
+        - ./docker-config
+   ozoneManager:
+      image: ahadoop/runner:latest
+      hostname: om
+      volumes:
+         - $SRC_VOLUME:/opt/hadoop
+      ports:
+         - 9874:9874
+      environment:
+         ENSURE_OM_INITIALIZED: /data/metadata/ozoneManager/current/VERSION
+      env_file:
+          - ./docker-config
+      command: ["/opt/hadoop/bin/ozone","om"]
+   scm:
+      image: ahadoop/runner:latest
+      hostname: scm
+      volumes:
+         - $SRC_VOLUME:/opt/hadoop
+      ports:
+         - 9876:9876
+      env_file:
+          - ./docker-config
+      environment:
+          ENSURE_SCM_INITIALIZED: /data/metadata/scm/current/VERSION
+      command: ["/opt/hadoop/bin/ozone","scm"]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
new file mode 100644
index 0000000..704dc7b
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-config
@@ -0,0 +1,103 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+OZONE-SITE.XML_ozone.om.address=om
+OZONE-SITE.XML_ozone.om.http-address=om:9874
+OZONE-SITE.XML_ozone.scm.names=scm
+OZONE-SITE.XML_ozone.enabled=True
+OZONE-SITE.XML_ozone.scm.datanode.id=/data/datanode.id
+OZONE-SITE.XML_ozone.scm.block.client.address=scm
+OZONE-SITE.XML_ozone.metadata.dirs=/data/metadata
+OZONE-SITE.XML_ozone.handler.type=distributed
+OZONE-SITE.XML_ozone.scm.client.address=scm
+OZONE-SITE.XML_ozone.replication=1
+OZONE-SITE.XML_hdds.scm.kerberos.principal=scm/scm@EXAMPLE.COM
+OZONE-SITE.XML_hdds.scm.kerberos.keytab.file=/etc/security/keytabs/scm.keytab
+OZONE-SITE.XML_ozone.om.kerberos.principal=om/om@EXAMPLE.COM
+OZONE-SITE.XML_ozone.om.kerberos.keytab.file=/etc/security/keytabs/om.keytab
+OZONE-SITE.XML_ozone.security.enabled=true
+OZONE-SITE.XML_hdds.scm.http.kerberos.principal=HTTP/scm@EXAMPLE.COM
+OZONE-SITE.XML_hdds.scm.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+OZONE-SITE.XML_ozone.om.http.kerberos.principal=HTTP/om@EXAMPLE.COM
+OZONE-SITE.XML_ozone.om.http.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+HDFS-SITE.XML_dfs.datanode.kerberos.principal=dn/_HOST@EXAMPLE.COM
+HDFS-SITE.XML_dfs.datanode.keytab.file=/etc/security/keytabs/dn.keytab
+HDFS-SITE.XML_dfs.web.authentication.kerberos.principal=HTTP/_HOST@EXAMPLE.COM
+HDFS-SITE.XML_dfs.web.authentication.kerberos.keytab=/etc/security/keytabs/HTTP.keytab
+HDFS-SITE.XML_dfs.datanode.address=0.0.0.0:1019
+HDFS-SITE.XML_dfs.datanode.http.address=0.0.0.0:1012
+CORE-SITE.XML_dfs.data.transfer.protection=authentication
+CORE-SITE.XML_hadoop.security.authentication=kerberos
+CORE-SITE.XML_hadoop.security.auth_to_local=RULE:[2:$1@$0](.*)s/.*/root/
+HDFS-SITE.XML_rpc.metrics.quantile.enable=true
+HDFS-SITE.XML_rpc.metrics.percentiles.intervals=60,300
+LOG4J.PROPERTIES_log4j.rootLogger=INFO, stdout
+LOG4J.PROPERTIES_log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+LOG4J.PROPERTIES_log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+LOG4J.PROPERTIES_log4j.appender.stdout.layout.ConversionPattern=%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
+LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
+LOG4J.PROPERTIES_log4j.logger.org.apache.ratis.conf.ConfUtils=WARN
+LOG4J.PROPERTIES_log4j.logger.org.apache.hadoop.security.ShellBasedUnixGroupsMapping=ERROR
+
+#Enable this variable to print out all hadoop rpc traffic to the stdout. See http://byteman.jboss.org/ to define your own instrumentation.
+#BYTEMAN_SCRIPT_URL=https://raw.githubusercontent.com/apache/hadoop/trunk/dev-support/byteman/hadooprpc.btm
+
+#LOG4J2.PROPERTIES_* are for Ozone Audit Logging
+LOG4J2.PROPERTIES_monitorInterval=30
+LOG4J2.PROPERTIES_filter=read,write
+LOG4J2.PROPERTIES_filter.read.type=MarkerFilter
+LOG4J2.PROPERTIES_filter.read.marker=READ
+LOG4J2.PROPERTIES_filter.read.onMatch=DENY
+LOG4J2.PROPERTIES_filter.read.onMismatch=NEUTRAL
+LOG4J2.PROPERTIES_filter.write.type=MarkerFilter
+LOG4J2.PROPERTIES_filter.write.marker=WRITE
+LOG4J2.PROPERTIES_filter.write.onMatch=NEUTRAL
+LOG4J2.PROPERTIES_filter.write.onMismatch=NEUTRAL
+LOG4J2.PROPERTIES_appenders=console, rolling
+LOG4J2.PROPERTIES_appender.console.type=Console
+LOG4J2.PROPERTIES_appender.console.name=STDOUT
+LOG4J2.PROPERTIES_appender.console.layout.type=PatternLayout
+LOG4J2.PROPERTIES_appender.console.layout.pattern=%d{DEFAULT} | %-5level | %c{1} | %msg | %throwable{3} %n
+LOG4J2.PROPERTIES_appender.rolling.type=RollingFile
+LOG4J2.PROPERTIES_appender.rolling.name=RollingFile
+LOG4J2.PROPERTIES_appender.rolling.fileName =${sys:hadoop.log.dir}/om-audit-${hostName}.log
+LOG4J2.PROPERTIES_appender.rolling.filePattern=${sys:hadoop.log.dir}/om-audit-${hostName}-%d{yyyy-MM-dd-HH-mm-ss}-%i.log.gz
+LOG4J2.PROPERTIES_appender.rolling.layout.type=PatternLayout
+LOG4J2.PROPERTIES_appender.rolling.layout.pattern=%d{DEFAULT} | %-5level | %c{1} | %msg | %throwable{3} %n
+LOG4J2.PROPERTIES_appender.rolling.policies.type=Policies
+LOG4J2.PROPERTIES_appender.rolling.policies.time.type=TimeBasedTriggeringPolicy
+LOG4J2.PROPERTIES_appender.rolling.policies.time.interval=86400
+LOG4J2.PROPERTIES_appender.rolling.policies.size.type=SizeBasedTriggeringPolicy
+LOG4J2.PROPERTIES_appender.rolling.policies.size.size=64MB
+LOG4J2.PROPERTIES_loggers=audit
+LOG4J2.PROPERTIES_logger.audit.type=AsyncLogger
+LOG4J2.PROPERTIES_logger.audit.name=OMAudit
+LOG4J2.PROPERTIES_logger.audit.level=INFO
+LOG4J2.PROPERTIES_logger.audit.appenderRefs=rolling
+LOG4J2.PROPERTIES_logger.audit.appenderRef.file.ref=RollingFile
+LOG4J2.PROPERTIES_rootLogger.level=INFO
+LOG4J2.PROPERTIES_rootLogger.appenderRefs=stdout
+LOG4J2.PROPERTIES_rootLogger.appenderRef.stdout.ref=STDOUT
+
+OZONE_DATANODE_SECURE_USER=root
+CONF_DIR=/etc/security/keytabs
+KERBEROS_KEYTABS=dn om scm HTTP testuser
+KERBEROS_KEYSTORES=hadoop
+KERBEROS_SERVER=kdc
+JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
+JSVC_HOME=/usr/bin
+SLEEP_SECONDS=5
+KERBEROS_ENABLED=true

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
new file mode 100644
index 0000000..efda03e
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/Dockerfile
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM openjdk:8-jdk
+RUN apt-get update && apt-get install -y  jq curl python sudo && apt-get clean
+
+RUN wget -O /usr/local/bin/dumb-init https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64
+RUN chmod +x /usr/local/bin/dumb-init
+RUN mkdir -p /etc/security/keytabs && chmod -R a+wr /etc/security/keytabs
+ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/
+ENV PATH $PATH:/opt/hadoop/bin
+
+RUN apt-get install -y jsvc
+ENV JSVC_HOME=/usr/bin
+ADD scripts/krb5.conf /etc/
+RUN apt-get install -y krb5-user
+
+RUN addgroup --gid 1000 hadoop
+RUN adduser --disabled-password --gecos "" --uid 1000 hadoop --gid 1000 --home /opt/hadoop
+RUN echo "hadoop ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
+ADD scripts /opt/
+
+WORKDIR /opt/hadoop
+
+VOLUME /data
+ENTRYPOINT ["/usr/local/bin/dumb-init", "--", "/opt/starter.sh"]

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/build.sh b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
new file mode 100755
index 0000000..abbdebe
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/build.sh
@@ -0,0 +1,26 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+set -e
+mkdir -p build
+if [ ! -d "$DIR/build/apache-rat-0.12" ]; then
+  wget "http://xenia.sote.hu/ftp/mirrors/www.apache.org/creadur/apache-rat-0.12/apache-rat-0.12-bin.tar.gz" -O "$DIR/build/apache-rat.tar.gz"
+  cd "$DIR"/build || exit
+  tar zvxf apache-rat.tar.gz
+fi
+java -jar "$DIR"/build/apache-rat-0.12/apache-rat-0.12.jar "$DIR" -e public -e apache-rat-0.12 -e .git -e .gitignore
+docker build -t ahadoop/runner .

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
new file mode 100755
index 0000000..ad2e176
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/envtoconf.py
@@ -0,0 +1,115 @@
+#!/usr/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""convert environment variables to config"""
+
+import os
+import re
+
+import argparse
+
+import sys
+import transformation
+
+class Simple(object):
+  """Simple conversion"""
+  def __init__(self, args):
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--destination", help="Destination directory", required=True)
+    self.args = parser.parse_args(args=args)
+    # copy the default files to file.raw in destination directory
+
+    self.known_formats = ['xml', 'properties', 'yaml', 'yml', 'env', "sh", "cfg", 'conf']
+    self.output_dir = self.args.destination
+
+    self.configurables = {}
+
+  def destination_file_path(self, name, extension):
+    """destination file path"""
+    return os.path.join(self.output_dir, "{}.{}".format(name, extension))
+
+  def write_env_var(self, name, extension, key, value):
+    """Write environment variables"""
+    with open(self.destination_file_path(name, extension) + ".raw", "a") as myfile:
+      myfile.write("{}: {}\n".format(key, value))
+
+  def process_envs(self):
+    """Process environment variables"""
+    for key in os.environ.keys():
+      pattern = re.compile("[_\\.]")
+      parts = pattern.split(key)
+      extension = None
+      name = parts[0].lower()
+      if len(parts) > 1:
+        extension = parts[1].lower()
+        config_key = key[len(name) + len(extension) + 2:].strip()
+      if extension and "!" in extension:
+        splitted = extension.split("!")
+        extension = splitted[0]
+        fmt = splitted[1]
+        config_key = key[len(name) + len(extension) + len(fmt) + 3:].strip()
+      else:
+        fmt = extension
+
+      if extension and extension in self.known_formats:
+        if name not in self.configurables.keys():
+          with open(self.destination_file_path(name, extension) + ".raw", "w") as myfile:
+            myfile.write("")
+        self.configurables[name] = (extension, fmt)
+        self.write_env_var(name, extension, config_key, os.environ[key])
+      else:
+        for configurable_name in self.configurables:
+          if key.lower().startswith(configurable_name.lower()):
+            self.write_env_var(configurable_name,
+                               self.configurables[configurable_name],
+                               key[len(configurable_name) + 1:],
+                               os.environ[key])
+
+  def transform(self):
+    """transform"""
+    for configurable_name in self.configurables:
+      name = configurable_name
+      extension, fmt = self.configurables[name]
+
+      destination_path = self.destination_file_path(name, extension)
+
+      with open(destination_path + ".raw", "r") as myfile:
+        content = myfile.read()
+        transformer_func = getattr(transformation, "to_" + fmt)
+        content = transformer_func(content)
+        with open(destination_path, "w") as myfile:
+          myfile.write(content)
+
+  def main(self):
+    """main"""
+
+    # add the
+    self.process_envs()
+
+    # copy file.ext.raw to file.ext in the destination directory, and
+    # transform to the right format (eg. key: value ===> XML)
+    self.transform()
+
+
+def main():
+  """main"""
+  Simple(sys.argv[1:]).main()
+
+
+if __name__ == '__main__':
+  Simple(sys.argv[1:]).main()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
new file mode 100644
index 0000000..82ae73a
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/krb5.conf
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+[logging]
+ default = FILE:/var/log/krb5libs.log
+ kdc = FILE:/var/log/krb5kdc.log
+ admin_server = FILE:/var/log/kadmind.log
+
+[libdefaults]
+ dns_canonicalize_hostname = false
+ dns_lookup_realm = false
+ ticket_lifetime = 24h
+ renew_lifetime = 7d
+ forwardable = true
+ rdns = false
+ default_realm = EXAMPLE.COM
+
+[realms]
+ EXAMPLE.COM = {
+  kdc = SERVER
+  admin_server = SERVER
+ }
+
+[domain_realm]
+ .example.com = EXAMPLE.COM
+ example.com = EXAMPLE.COM
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
new file mode 100755
index 0000000..04cd49d
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/starter.sh
@@ -0,0 +1,100 @@
+#!/usr/bin/env bash
+##
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+set -e
+
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+echo "Setting up environment!!"
+
+if [ -n "$KERBEROS_ENABLED" ]; then
+  echo "Setting up kerberos!!"
+  KERBEROS_SERVER=${KERBEROS_SERVER:-krb5}
+  ISSUER_SERVER=${ISSUER_SERVER:-$KERBEROS_SERVER\:8081}
+
+  echo "KDC ISSUER_SERVER => $ISSUER_SERVER"
+
+  if [ -n "$SLEEP_SECONDS" ]; then
+    echo "Sleeping for ${SLEEP_SECONDS} seconds"
+    sleep "$SLEEP_SECONDS"
+  fi
+
+
+  while true
+    do
+      STATUS=$(curl -s -o /dev/null -w '%{http_code}' http://"$ISSUER_SERVER"/keytab/test/test)
+      if [ "$STATUS" -eq 200 ]; then
+        echo "Got 200, KDC service ready!!"
+        break
+      else
+        echo "Got $STATUS :( KDC service not ready yet..."
+      fi
+      sleep 5
+    done
+
+    HOST_NAME=$(hostname -f)
+    export HOST_NAME
+    for NAME in ${KERBEROS_KEYTABS}; do
+      echo "Download $NAME/$HOST_NAME@EXAMPLE.COM keytab file to $CONF_DIR/$NAME.keytab"
+      wget "http://$ISSUER_SERVER/keytab/$HOST_NAME/$NAME" -O "$CONF_DIR/$NAME.keytab"
+      klist -kt "$CONF_DIR/$NAME.keytab"
+      KERBEROS_ENABLED=true
+    done
+
+    sed "s/SERVER/$KERBEROS_SERVER/g" "$DIR"/krb5.conf | sudo tee /etc/krb5.conf
+fi
+
+#To avoid docker volume permission problems
+sudo chmod o+rwx /data
+
+"$DIR"/envtoconf.py --destination /opt/hadoop/etc/hadoop
+
+if [ -n "$ENSURE_NAMENODE_DIR" ]; then
+  CLUSTERID_OPTS=""
+  if [ -n "$ENSURE_NAMENODE_CLUSTERID" ]; then
+    CLUSTERID_OPTS="-clusterid $ENSURE_NAMENODE_CLUSTERID"
+  fi
+  if [ ! -d "$ENSURE_NAMENODE_DIR" ]; then
+    /opt/hadoop/bin/hdfs namenode -format -force "$CLUSTERID_OPTS"
+  fi
+fi
+
+if [ -n "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
+  if [ ! -d "$ENSURE_STANDBY_NAMENODE_DIR" ]; then
+    /opt/hadoop/bin/hdfs namenode -bootstrapStandby
+  fi
+fi
+
+if [ -n "$ENSURE_SCM_INITIALIZED" ]; then
+  if [ ! -f "$ENSURE_SCM_INITIALIZED" ]; then
+    /opt/hadoop/bin/ozone scm -init
+  fi
+fi
+
+if [ -n "$ENSURE_OM_INITIALIZED" ]; then
+  if [ ! -f "$ENSURE_OM_INITIALIZED" ]; then
+    #To make sure SCM is running in dockerized environment we will sleep
+    # Could be removed after HDFS-13203
+    echo "Waiting 15 seconds for SCM startup"
+    sleep 15
+    /opt/hadoop/bin/ozone om -createObjectStore
+  fi
+fi
+
+echo 'setup finished'
+"$@"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fed478a9/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
new file mode 100755
index 0000000..5e708ce
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/compose/ozonesecure/docker-image/runner/scripts/transformation.py
@@ -0,0 +1,150 @@
+#!/usr/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""This module transform properties into different format"""
+def render_yaml(yaml_root, prefix=""):
+  """render yaml"""
+  result = ""
+  if isinstance(yaml_root, dict):
+    if prefix:
+      result += "\n"
+      for key in yaml_root:
+        result += "{}{}: {}".format(prefix, key, render_yaml(
+            yaml_root[key], prefix + "   "))
+  elif isinstance(yaml_root, list):
+    result += "\n"
+    for item in yaml_root:
+      result += prefix + " - " + render_yaml(item, prefix + " ")
+  else:
+    result += "{}\n".format(yaml_root)
+  return result
+
+
+def to_yaml(content):
+  """transform to yaml"""
+  props = process_properties(content)
+
+  keys = props.keys()
+  yaml_props = {}
+  for key in keys:
+    parts = key.split(".")
+    node = yaml_props
+    prev_part = None
+    parent_node = {}
+    for part in parts[:-1]:
+      if part.isdigit():
+        if isinstance(node, dict):
+          parent_node[prev_part] = []
+          node = parent_node[prev_part]
+        while len(node) <= int(part):
+          node.append({})
+        parent_node = node
+        node = node[int(part)]
+      else:
+        if part not in node:
+          node[part] = {}
+        parent_node = node
+        node = node[part]
+      prev_part = part
+    if parts[-1].isdigit():
+      if isinstance(node, dict):
+        parent_node[prev_part] = []
+        node = parent_node[prev_part]
+      node.append(props[key])
+    else:
+      node[parts[-1]] = props[key]
+
+  return render_yaml(yaml_props)
+
+
+def to_yml(content):
+  """transform to yml"""
+  return to_yaml(content)
+
+
+def to_properties(content):
+  """transform to properties"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}: {}\n".format(key, val)
+  return result
+
+
+def to_env(content):
+  """transform to environment variables"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}={}\n".format(key, val)
+  return result
+
+
+def to_sh(content):
+  """transform to shell"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "export {}=\"{}\"\n".format(key, val)
+  return result
+
+
+def to_cfg(content):
+  """transform to config"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "{}={}\n".format(key, val)
+  return result
+
+
+def to_conf(content):
+  """transform to configuration"""
+  result = ""
+  props = process_properties(content)
+  for key, val in props.items():
+    result += "export {}={}\n".format(key, val)
+  return result
+
+
+def to_xml(content):
+  """transform to xml"""
+  result = "<configuration>\n"
+  props = process_properties(content)
+  for key in props:
+    result += "<property><name>{0}</name><value>{1}</value></property>\n". \
+      format(key, props[key])
+  result += "</configuration>"
+  return result
+
+
+def process_properties(content, sep=': ', comment_char='#'):
+  """
+  Read the file passed as parameter as a properties file.
+  """
+  props = {}
+  for line in content.split("\n"):
+    sline = line.strip()
+    if sline and not sline.startswith(comment_char):
+      key_value = sline.split(sep)
+      key = key_value[0].strip()
+      value = sep.join(key_value[1:]).strip().strip('"')
+      props[key] = value
+
+  return props


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message