ignite-commits mailing list archives

From sboi...@apache.org
Subject [10/50] [abbrv] ignite git commit: IGNITE-3184: Hadoop: HADOOP_HOME is no longer required if all HADOOP_*_HOME environment variables are set.
Date Thu, 07 Jul 2016 06:01:35 GMT
IGNITE-3184: Hadoop: HADOOP_HOME is no longer required if all HADOOP_*_HOME environment variables are set.
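
In other words, the classpath can now be resolved from the per-component homes alone. A minimal sketch of the two supported setups (the installation paths below are hypothetical and depend on the local Hadoop layout):

    # Option 1: point Ignite at the Hadoop installation root, as before.
    export HADOOP_HOME=/usr/lib/hadoop          # hypothetical path

    # Option 2 (new): HADOOP_HOME may be omitted if all component homes are set.
    export HADOOP_COMMON_HOME=/usr/lib/hadoop/share/hadoop/common
    export HADOOP_HDFS_HOME=/usr/lib/hadoop/share/hadoop/hdfs
    export HADOOP_MAPRED_HOME=/usr/lib/hadoop/share/hadoop/mapreduce

Each HADOOP_*_HOME value is used as-is when present; otherwise it is derived from HADOOP_HOME (or HADOOP_PREFIX) plus the standard share/hadoop subdirectory, as implemented in HadoopClasspathUtils.resolveLocation() below.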


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/26eec3a0
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/26eec3a0
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/26eec3a0

Branch: refs/heads/ignite-1232
Commit: 26eec3a00171418391dcf9db1ac3299aaf003ee4
Parents: 00c3383
Author: vozerov-gridgain <vozerov@gridgain.com>
Authored: Mon Jun 27 13:44:38 2016 +0300
Committer: vozerov-gridgain <vozerov@gridgain.com>
Committed: Thu Jun 30 10:18:48 2016 +0300

----------------------------------------------------------------------
 bin/include/hadoop-classpath.bat                |  73 ------
 bin/include/hadoop-classpath.sh                 |  65 ------
 bin/include/setenv.bat                          |   6 +-
 bin/include/setenv.sh                           |  25 +-
 .../processors/hadoop/HadoopClassLoader.java    |  73 +-----
 .../processors/hadoop/HadoopClasspathMain.java  |  44 ++++
 .../processors/hadoop/HadoopClasspathUtils.java | 233 +++++++++++++++++++
 .../processors/hadoop/HadoopLocations.java      |  78 +++++++
 .../processors/hadoop/HadoopProcessor.java      |  47 ++--
 9 files changed, 404 insertions(+), 240 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/bin/include/hadoop-classpath.bat
----------------------------------------------------------------------
diff --git a/bin/include/hadoop-classpath.bat b/bin/include/hadoop-classpath.bat
deleted file mode 100644
index dd293a7..0000000
--- a/bin/include/hadoop-classpath.bat
+++ /dev/null
@@ -1,73 +0,0 @@
-::
-:: Licensed to the Apache Software Foundation (ASF) under one or more
-:: contributor license agreements.  See the NOTICE file distributed with
-:: this work for additional information regarding copyright ownership.
-:: The ASF licenses this file to You under the Apache License, Version 2.0
-:: (the "License"); you may not use this file except in compliance with
-:: the License.  You may obtain a copy of the License at
-::
-::      http://www.apache.org/licenses/LICENSE-2.0
-::
-:: Unless required by applicable law or agreed to in writing, software
-:: distributed under the License is distributed on an "AS IS" BASIS,
-:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-:: See the License for the specific language governing permissions and
-:: limitations under the License.
-::
-
-:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
-::                 Hadoop class path resolver.
-::  Requires environment variables 'HADOOP_PREFIX' or 'HADOOP_HOME'
-::  to be set.
-:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
-
-:: Turn off script echoing.
-@echo off
-
-:: Check if environment passes deprecated HADOOP_HOME.
-if not defined HADOOP_PREFIX set HADOOP_PREFIX=%HADOOP_HOME%
-
-:: Exit if we cannot find Hadoop installation directory.
-if not defined HADOOP_PREFIX goto :eof
-
-:: Trim quotes.
-set HADOOP_PREFIX=%HADOOP_PREFIX:"=%
-
-:: Trim slashes.
-if %HADOOP_PREFIX:~-1,1% == \ (
-    set HADOOP_PREFIX=%HADOOP_PREFIX:~0,-1%
-)
-
-::
-:: Setting the rest of Hadoop environment variables.
-::
-
-if not defined HADOOP_COMMON_HOME set HADOOP_COMMON_HOME=%HADOOP_PREFIX%\share\hadoop\common
-if not defined HADOOP_HDFS_HOME set HADOOP_HDFS_HOME=%HADOOP_PREFIX%\share\hadoop\hdfs
-if not defined HADOOP_MAPRED_HOME set HADOOP_MAPRED_HOME=%HADOOP_PREFIX%\share\hadoop\mapreduce
-
-::
-:: Libraries included in classpath.
-::
-
-set CP=%HADOOP_COMMON_HOME%\lib\*;%HADOOP_MAPRED_HOME%\lib\*;%HADOOP_MAPRED_HOME%\lib\*
-
-:: hadoop-auth-* jar can be located either in home or in home/lib directory, depending on the hadoop version.
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\hadoop-auth-* ^>nul 2^>^&1') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\lib\hadoop-auth-* ^>nul 2^>^&1') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\hadoop-common-*') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_HDFS_HOME%\hadoop-hdfs-*') do call :concat %HADOOP_HDFS_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_MAPRED_HOME%\hadoop-mapreduce-client-common-*') do call :concat %HADOOP_MAPRED_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_MAPRED_HOME%\hadoop-mapreduce-client-core-*') do call :concat %HADOOP_MAPRED_HOME%\%%f
-
-:: Export result.
-set IGNITE_HADOOP_CLASSPATH=%CP%
-
-:: Exit.
-goto :eof
-
-:: Function that adds jar dependency into classpath.
-:concat
-    set file=%1
-    if %file:~-9,9% neq tests.jar set CP=%CP%;%1
-goto :eof

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/bin/include/hadoop-classpath.sh
----------------------------------------------------------------------
diff --git a/bin/include/hadoop-classpath.sh b/bin/include/hadoop-classpath.sh
deleted file mode 100755
index 3a0aeb1..0000000
--- a/bin/include/hadoop-classpath.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-####################################################################
-#                 Hadoop class path resolver.
-#  Requires environment variables 'HADOOP_PREFIX' or 'HADOOP_HOME'
-#  to be set. If they are both undefined , tries to read them from
-#  from '/etc/default/hadoop' file. The final results are printed
-#  into standard output.
-####################################################################
-
-# Resolve constants.
-HADOOP_DEFAULTS="/etc/default/hadoop"
-HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_HOME}
-
-# Try get all variables from Hadoop default environment config
-# if they have not been passed into the script.
-if [[ -z "$HADOOP_PREFIX" && -f "$HADOOP_DEFAULTS" ]]; then
-    source "$HADOOP_DEFAULTS"
-fi
-
-# Return if Hadoop couldn't be found.
-[ -z "$HADOOP_PREFIX" ] && return
-
-#
-# Resolve the rest of Hadoop environment variables.
-#
-
-HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME-"${HADOOP_PREFIX}/share/hadoop/common"}
-HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME-"${HADOOP_PREFIX}/share/hadoop/hdfs"}
-HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME-"${HADOOP_PREFIX}/share/hadoop/mapreduce"}
-
-#
-# Calculate classpath string with required Hadoop libraries.
-#
-
-# Add all Hadoop libs.
-IGNITE_HADOOP_CLASSPATH="${HADOOP_COMMON_HOME}/lib/*${SEP}${HADOOP_MAPRED_HOME}/lib/*${SEP}${HADOOP_MAPRED_HOME}/lib/*"
-
-# Skip globbing pattern if it cannot be resolved.
-shopt -s nullglob
-
-# Add jars to classpath excluding tests.
-# hadoop-auth-* jar can be located either in home or in home/lib directory, depending on the hadoop version.
-for file in ${HADOOP_HDFS_HOME}/hadoop-hdfs-* \
-            ${HADOOP_COMMON_HOME}/hadoop-{common,auth}-* \
-            ${HADOOP_COMMON_HOME}/lib/hadoop-auth-* \
-            ${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-{common,core}-*; do
-    [[ "$file" != *-tests.jar ]] && IGNITE_HADOOP_CLASSPATH=${IGNITE_HADOOP_CLASSPATH}${SEP}${file}
-done

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/bin/include/setenv.bat
----------------------------------------------------------------------
diff --git a/bin/include/setenv.bat b/bin/include/setenv.bat
index 811bc95..dec1ee5 100644
--- a/bin/include/setenv.bat
+++ b/bin/include/setenv.bat
@@ -47,11 +47,9 @@ if exist %IGNITE_HOME%\libs\ignite-hadoop set HADOOP_EDITION=1
 
 if defined USER_LIBS set IGNITE_LIBS=%USER_LIBS%;%IGNITE_LIBS%
 
-if "%HADOOP_EDITION%" == "1" call "%SCRIPTS_HOME%\include\hadoop-classpath.bat"
+FOR /F "delims=" %%i IN ('%JAVA_HOME%\bin\java.exe -cp %IGNITE_HOME%\libs\ignite-hadoop\*
org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ";"' ) DO set IGNITE_HADOOP_CLASSPATH=%%i
 
-set COMMON_HOME_LIB=%HADOOP_COMMON_HOME%\lib
-
-if "%IGNITE_HADOOP_CLASSPATH%" == "" goto :eof
+if "[%IGNITE_HADOOP_CLASSPATH%]" == "[]" exit 1
 
 set IGNITE_LIBS=%IGNITE_LIBS%;%IGNITE_HADOOP_CLASSPATH%
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/bin/include/setenv.sh
----------------------------------------------------------------------
diff --git a/bin/include/setenv.sh b/bin/include/setenv.sh
index 74b6608..a85cba3 100755
--- a/bin/include/setenv.sh
+++ b/bin/include/setenv.sh
@@ -72,9 +72,28 @@ if [ "${USER_LIBS}" != "" ]; then
 fi
 
 if [ "${HADOOP_EDITION}" == "1" ]; then
-    . "${SCRIPTS_HOME}"/include/hadoop-classpath.sh
+    # Resolve constants.
+    HADOOP_DEFAULTS="/etc/default/hadoop"
 
-    if [ "${IGNITE_HADOOP_CLASSPATH}" != "" ]; then
-        IGNITE_LIBS=${IGNITE_LIBS}${SEP}$IGNITE_HADOOP_CLASSPATH
+    #
+    # Resolve the rest of Hadoop environment variables.
+    #
+    if [[ -z "${HADOOP_COMMON_HOME}" || -z "${HADOOP_HDFS_HOME}" || -z "${HADOOP_MAPRED_HOME}" ]]; then
+        if [ -f "$HADOOP_DEFAULTS" ]; then
+            source "$HADOOP_DEFAULTS"
+        fi
     fi
+
+    IGNITE_HADOOP_CLASSPATH=$( "$JAVA" -cp "${IGNITE_HOME}"/libs/ignite-hadoop/'*' \
+        org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ":" )
+
+    statusCode=${?}
+
+    if [ "${statusCode}" -ne 0 ]; then
+       exit ${statusCode}
+    fi
+
+    unset statusCode
+
+    IGNITE_LIBS=${IGNITE_LIBS}${SEP}${IGNITE_HADOOP_CLASSPATH}
 fi
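
For reference, the '/etc/default/hadoop' file sourced above only needs to export the component homes when they are not already set in the environment; a hypothetical example (paths are illustrative):

    # /etc/default/hadoop -- hypothetical contents
    export HADOOP_COMMON_HOME=/usr/lib/hadoop/share/hadoop/common
    export HADOOP_HDFS_HOME=/usr/lib/hadoop/share/hadoop/hdfs
    export HADOOP_MAPRED_HOME=/usr/lib/hadoop/share/hadoop/mapreduce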

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 340b35b..1c844c4 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -17,11 +17,8 @@
 
 package org.apache.ignite.internal.processors.hadoop;
 
-import java.io.File;
-import java.io.FilenameFilter;
 import java.io.IOException;
 import java.io.InputStream;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.ArrayList;
@@ -460,40 +457,6 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
     }
 
     /**
-     * @param name Variable name.
-     * @param dflt Default.
-     * @return Value.
-     */
-    private static String getEnv(String name, String dflt) {
-        String res = System.getProperty(name);
-
-        if (F.isEmpty(res))
-            res = System.getenv(name);
-
-        return F.isEmpty(res) ? dflt : res;
-    }
-
-    /**
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws MalformedURLException If failed.
-     */
-    private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws Exception {
-        File[] files = dir.listFiles(new FilenameFilter() {
-            @Override public boolean accept(File dir, String name) {
-                return startsWith == null || name.startsWith(startsWith);
-            }
-        });
-
-        if (files == null)
-            throw new IOException("Path is not a directory: " + dir);
-
-        for (File file : files)
-            res.add(file.toURI().toURL());
-    }
-
-    /**
      * @param urls URLs.
      * @return URLs.
      */
@@ -519,13 +482,6 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
     }
 
     /**
-     * @return HADOOP_HOME Variable.
-     */
-    @Nullable public static String hadoopHome() {
-        return getEnv("HADOOP_PREFIX", getEnv("HADOOP_HOME", null));
-    }
-
-    /**
      * @return Collection of jar URLs.
      * @throws IgniteCheckedException If failed.
      */
@@ -541,34 +497,11 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
             if (hadoopUrls != null)
                 return hadoopUrls;
 
-            hadoopUrls = new ArrayList<>();
-
-            String hadoopPrefix = hadoopHome();
-
-            if (F.isEmpty(hadoopPrefix))
-                throw new IgniteCheckedException("Failed resolve Hadoop installation location.
Either HADOOP_PREFIX or " +
-                    "HADOOP_HOME environment variables must be set.");
-
-            String commonHome = getEnv("HADOOP_COMMON_HOME", hadoopPrefix + "/share/hadoop/common");
-            String hdfsHome = getEnv("HADOOP_HDFS_HOME", hadoopPrefix + "/share/hadoop/hdfs");
-            String mapredHome = getEnv("HADOOP_MAPRED_HOME", hadoopPrefix + "/share/hadoop/mapreduce");
-
             try {
-                addUrls(hadoopUrls, new File(commonHome + "/lib"), null);
-                addUrls(hadoopUrls, new File(hdfsHome + "/lib"), null);
-                addUrls(hadoopUrls, new File(mapredHome + "/lib"), null);
-
-                addUrls(hadoopUrls, new File(hdfsHome), "hadoop-hdfs-");
-
-                addUrls(hadoopUrls, new File(commonHome), "hadoop-common-");
-                addUrls(hadoopUrls, new File(commonHome), "hadoop-auth-");
-                addUrls(hadoopUrls, new File(commonHome + "/lib"), "hadoop-auth-");
-
-                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-common");
-                addUrls(hadoopUrls, new File(mapredHome), "hadoop-mapreduce-client-core");
+                hadoopUrls = HadoopClasspathUtils.classpathUrls();
             }
-            catch (Exception e) {
-                throw new IgniteCheckedException(e);
+            catch (IOException e) {
+                throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations.",
e);
             }
 
             hadoopJars = hadoopUrls;

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
new file mode 100644
index 0000000..5279b7d
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+/**
+ * Main class to compose Hadoop classpath depending on the environment.
+ * This class is designed to be as independent of any Ignite classes as possible.
+ * Please make sure to pass the path separator character as the 1st parameter to the main method.
+ */
+public class HadoopClasspathMain {
+    /**
+     * Main method to be executed from scripts. It prints the classpath to the standard output.
+     *
+     * @param args The 1st argument should be the path separator character (":" on Linux, ";" on Windows).
+     */
+    public static void main(String[] args) throws Exception {
+        if (args.length < 1)
+            throw new IllegalArgumentException("Path separator must be passed as the first argument.");
+
+        String separator = args[0];
+
+        StringBuilder sb = new StringBuilder();
+
+        for (String path : HadoopClasspathUtils.classpathForJavaProcess())
+            sb.append(path).append(separator);
+
+        System.out.println(sb);
+    }
+}
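
As a quick manual check, the helper can be invoked the same way the updated setenv scripts call it; a sketch assuming JAVA_HOME and IGNITE_HOME point at a JDK and an Ignite installation:

    # Print the resolved Hadoop classpath, mirroring the call in setenv.sh.
    "$JAVA_HOME/bin/java" -cp "$IGNITE_HOME"/libs/ignite-hadoop/'*' \
        org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ":"

A non-zero exit code means one of the Hadoop locations could not be resolved, which is what makes the new setenv.sh abort instead of silently producing an empty classpath.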

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
new file mode 100644
index 0000000..986b23e
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Hadoop classpath utilities.
+ */
+public class HadoopClasspathUtils {
+    /**
+     * Gets Hadoop class path as list of classpath elements for process.
+     *
+     * @return List of the class path elements.
+     * @throws IOException If failed.
+     */
+    public static List<String> classpathForJavaProcess() throws IOException {
+        List<String> res = new ArrayList<>();
+
+        for (final SearchDirectory dir : classpathDirectories()) {
+            if (dir.hasFilter()) {
+                for (File file : dir.files())
+                    res.add(file.getAbsolutePath());
+            }
+            else
+                res.add(dir.absolutePath() + File.separator + '*');
+        }
+
+        return res;
+    }
+
+    /**
+     * Gets Hadoop class path as a list of URLs (for in-process class loader usage).
+     *
+     * @return List of class path URLs.
+     * @throws IOException If failed.
+     */
+    public static List<URL> classpathUrls() throws IOException {
+        List<URL> res = new ArrayList<>();
+
+        for (SearchDirectory dir : classpathDirectories()) {
+            for (File file : dir.files()) {
+                try {
+                    res.add(file.toURI().toURL());
+                }
+                catch (MalformedURLException e) {
+                    throw new IOException("Failed to convert file path to URL: " + file.getPath());
+                }
+            }
+        }
+
+        return res;
+    }
+
+    /**
+     * Gets Hadoop locations.
+     *
+     * @return The Hadoop locations, never null.
+     */
+    public static HadoopLocations hadoopLocations() throws IOException {
+        final String hadoopHome = systemOrEnv("HADOOP_HOME", systemOrEnv("HADOOP_PREFIX", null));
+
+        String commonHome = resolveLocation("HADOOP_COMMON_HOME", hadoopHome, "/share/hadoop/common");
+        String hdfsHome = resolveLocation("HADOOP_HDFS_HOME", hadoopHome, "/share/hadoop/hdfs");
+        String mapredHome = resolveLocation("HADOOP_MAPRED_HOME", hadoopHome, "/share/hadoop/mapreduce");
+
+        return new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome);
+    }
+
+    /**
+     * Gets base directories to discover classpath elements in.
+     *
+     * @return Collection of directory and mask pairs.
+     * @throws IOException if a mandatory classpath location is not found.
+     */
+    private static Collection<SearchDirectory> classpathDirectories() throws IOException {
+        HadoopLocations loc = hadoopLocations();
+
+        Collection<SearchDirectory> res = new ArrayList<>();
+
+        res.add(new SearchDirectory(new File(loc.commonHome(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.hdfsHome(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.mapredHome(), "lib"), null));
+
+        res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-common-"));
+        res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-auth-"));
+
+        res.add(new SearchDirectory(new File(loc.hdfsHome()), "hadoop-hdfs-"));
+
+        res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-common"));
+        res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-core"));
+
+        return res;
+    }
+
+    /**
+     * Resolves a Hadoop location directory.
+     *
+     * @param envVarName Environment variable name. The value denotes the location path.
+     * @param hadoopHome Hadoop home location, may be null.
+     * @param expHadoopHomeRelativePath The path relative to Hadoop home, expected to start with path separator.
+     * @throws IOException If the value cannot be resolved to an existing directory.
+     */
+    private static String resolveLocation(String envVarName, String hadoopHome, String expHadoopHomeRelativePath)
+        throws IOException {
+        String val = systemOrEnv(envVarName, null);
+
+        if (val == null) {
+            // The env. variable is not set. Try to resolve the location relative to HADOOP_HOME:
+            if (!directoryExists(hadoopHome))
+                throw new IOException("Failed to resolve Hadoop installation location. " +
+                        envVarName + " or HADOOP_HOME environment variable should be set.");
+
+            val = hadoopHome + expHadoopHomeRelativePath;
+        }
+
+        if (!directoryExists(val))
+            throw new IOException("Failed to resolve Hadoop location [path=" + val + ']');
+
+        return val;
+    }
+
+    /**
+     * Note that this method does not treat empty value as an absent value.
+     *
+     * @param name Variable name.
+     * @param dflt Default.
+     * @return Value.
+     */
+    private static String systemOrEnv(String name, String dflt) {
+        String res = System.getProperty(name);
+
+        if (res == null)
+            res = System.getenv(name);
+
+        return res == null ? dflt : res;
+    }
+
+    /**
+     * Answers if the given path denotes existing directory.
+     *
+     * @param path The directory path.
+     * @return {@code True} if the given path denotes an existing directory.
+     */
+    private static boolean directoryExists(String path) {
+        if (path == null)
+            return false;
+
+        Path p = Paths.get(path);
+
+        return Files.exists(p) && Files.isDirectory(p) && Files.isReadable(p);
+    }
+
+    /**
+     * Simple pair-like structure to hold directory name and a mask assigned to it.
+     */
+    private static class SearchDirectory {
+        /** File. */
+        private final File dir;
+
+        /** The mask. */
+        private final String filter;
+
+        /**
+         * Constructor.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         */
+        private SearchDirectory(File dir, String filter) throws IOException {
+            this.dir = dir;
+            this.filter = filter;
+
+            if (!directoryExists(dir.getAbsolutePath()))
+                throw new IOException("Directory cannot be read: " + dir.getAbsolutePath());
+        }
+
+        /**
+         * @return Absolute path.
+         */
+        private String absolutePath() {
+            return dir.getAbsolutePath();
+        }
+
+        /**
+         * @return Child files.
+         */
+        private File[] files() throws IOException {
+            File[] files = dir.listFiles(new FilenameFilter() {
+                @Override public boolean accept(File dir, String name) {
+                    return filter == null || name.startsWith(filter);
+                }
+            });
+
+            if (files == null)
+                throw new IOException("Path is not a directory. [dir=" + dir + ']');
+
+            return files;
+        }
+
+        /**
+         * @return {@code True} if filter exists.
+         */
+        private boolean hasFilter() {
+            return filter != null;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
new file mode 100644
index 0000000..b3e2d12
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+/**
+ * Simple structure to hold Hadoop directory locations.
+ */
+public class HadoopLocations {
+    /** Hadoop home. */
+    private final String home;
+
+    /** Common home. */
+    private final String commonHome;
+
+    /** HDFS home. */
+    private final String hdfsHome;
+
+    /** Mapred home. */
+    private final String mapredHome;
+
+    /**
+     * Constructor.
+     *
+     * @param home Hadoop home.
+     * @param commonHome Common home.
+     * @param hdfsHome HDFS home.
+     * @param mapredHome Mapred home.
+     */
+    public HadoopLocations(String home, String commonHome, String hdfsHome, String mapredHome) {
+        this.home = home;
+        this.commonHome = commonHome;
+        this.hdfsHome = hdfsHome;
+        this.mapredHome = mapredHome;
+    }
+
+    /**
+     * @return Hadoop home.
+     */
+    public String home() {
+        return home;
+    }
+
+    /**
+     * @return Common home.
+     */
+    public String commonHome() {
+        return commonHome;
+    }
+
+    /**
+     * @return HDFS home.
+     */
+    public String hdfsHome() {
+        return hdfsHome;
+    }
+
+    /**
+     * @return Mapred home.
+     */
+    public String mapredHome() {
+        return mapredHome;
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/26eec3a0/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
index 7292c78..c45953e 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
@@ -17,6 +17,7 @@
 
 package org.apache.ignite.internal.processors.hadoop;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.ListIterator;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -33,9 +34,6 @@ import org.apache.ignite.internal.util.tostring.GridToStringExclude;
 import org.apache.ignite.internal.util.typedef.internal.S;
 import org.apache.ignite.internal.util.typedef.internal.U;
 
-import static org.apache.ignite.internal.processors.hadoop.HadoopClassLoader.hadoopHome;
-import static org.apache.ignite.internal.processors.hadoop.HadoopClassLoader.hadoopUrls;
-
 /**
  * Hadoop processor.
  */
@@ -74,36 +72,35 @@ public class HadoopProcessor extends HadoopProcessorAdapter {
 
         validate(cfg);
 
-        if (hadoopHome() != null)
-            U.quietAndInfo(log, "HADOOP_HOME is set to " + hadoopHome());
-
-        boolean ok = false;
+        try {
+            HadoopLocations loc = HadoopClasspathUtils.hadoopLocations();
 
-        try { // Check for Hadoop installation.
-            hadoopUrls();
+            if (loc.home() != null)
+                U.quietAndInfo(log, "HADOOP_HOME is set to " + loc.home());
 
-            ok = true;
+            U.quietAndInfo(log, "HADOOP_COMMON_HOME is set to " + loc.commonHome());
+            U.quietAndInfo(log, "HADOOP_HDFS_HOME is set to " + loc.hdfsHome());
+            U.quietAndInfo(log, "HADOOP_MAPRED_HOME is set to " + loc.mapredHome());
         }
-        catch (IgniteCheckedException e) {
-            U.quietAndWarn(log, e.getMessage());
+        catch (IOException ioe) {
+            throw new IgniteCheckedException(ioe);
         }
 
-        if (ok) {
-            hctx = new HadoopContext(
-                ctx,
-                cfg,
-                new HadoopJobTracker(),
-                new HadoopEmbeddedTaskExecutor(),
-                // TODO: IGNITE-404: Uncomment when fixed.
-                //cfg.isExternalExecution() ? new HadoopExternalTaskExecutor() : new HadoopEmbeddedTaskExecutor(),
-                new HadoopShuffle());
+        HadoopClassLoader.hadoopUrls();
 
+        hctx = new HadoopContext(
+            ctx,
+            cfg,
+            new HadoopJobTracker(),
+            new HadoopEmbeddedTaskExecutor(),
+            // TODO: IGNITE-404: Uncomment when fixed.
+            //cfg.isExternalExecution() ? new HadoopExternalTaskExecutor() : new HadoopEmbeddedTaskExecutor(),
+            new HadoopShuffle());
 
-            for (HadoopComponent c : hctx.components())
-                c.start(hctx);
+        for (HadoopComponent c : hctx.components())
+            c.start(hctx);
 
-            hadoop = new HadoopImpl(this);
-        }
+        hadoop = new HadoopImpl(this);
     }
 
     /** {@inheritDoc} */

