ignite-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From voze...@apache.org
Subject [03/27] ignite git commit: IGNITE-3184: rewritten with pure-java classpath composition.
Date Fri, 01 Jul 2016 12:10:46 GMT
IGNITE-3184: rewritten with pure-java classpath composition.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/f0d08f86
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/f0d08f86
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/f0d08f86

Branch: refs/heads/ignite-3185
Commit: f0d08f86fd277aa549ba49d28049e7880e0dd18e
Parents: 0c7b08c
Author: iveselovskiy <iveselovskiy@gridgain.com>
Authored: Mon Jun 20 19:27:24 2016 +0300
Committer: iveselovskiy <iveselovskiy@gridgain.com>
Committed: Mon Jun 20 19:27:24 2016 +0300

----------------------------------------------------------------------
 bin/include/hadoop-classpath.bat                |  73 ------
 bin/include/hadoop-classpath.sh                 |  65 ------
 bin/include/setenv.bat                          |   8 +-
 bin/include/setenv.sh                           |  18 +-
 .../processors/hadoop/HadoopClassLoader.java    |  17 +-
 .../processors/hadoop/HadoopClasspathMain.java  | 231 ++----------------
 .../processors/hadoop/HadoopClasspathUtils.java | 232 +++++++++++++++++++
 .../processors/hadoop/HadoopProcessor.java      |   9 +-
 8 files changed, 261 insertions(+), 392 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/bin/include/hadoop-classpath.bat
----------------------------------------------------------------------
diff --git a/bin/include/hadoop-classpath.bat b/bin/include/hadoop-classpath.bat
deleted file mode 100644
index dd293a7..0000000
--- a/bin/include/hadoop-classpath.bat
+++ /dev/null
@@ -1,73 +0,0 @@
-::
-:: Licensed to the Apache Software Foundation (ASF) under one or more
-:: contributor license agreements.  See the NOTICE file distributed with
-:: this work for additional information regarding copyright ownership.
-:: The ASF licenses this file to You under the Apache License, Version 2.0
-:: (the "License"); you may not use this file except in compliance with
-:: the License.  You may obtain a copy of the License at
-::
-::      http://www.apache.org/licenses/LICENSE-2.0
-::
-:: Unless required by applicable law or agreed to in writing, software
-:: distributed under the License is distributed on an "AS IS" BASIS,
-:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-:: See the License for the specific language governing permissions and
-:: limitations under the License.
-::
-
-:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
-::                 Hadoop class path resolver.
-::  Requires environment variables 'HADOOP_PREFIX' or 'HADOOP_HOME'
-::  to be set.
-:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
-
-:: Turn off script echoing.
-@echo off
-
-:: Check if environment passes deprecated HADOOP_HOME.
-if not defined HADOOP_PREFIX set HADOOP_PREFIX=%HADOOP_HOME%
-
-:: Exit if we cannot find Hadoop installation directory.
-if not defined HADOOP_PREFIX goto :eof
-
-:: Trim quotes.
-set HADOOP_PREFIX=%HADOOP_PREFIX:"=%
-
-:: Trim slashes.
-if %HADOOP_PREFIX:~-1,1% == \ (
-    set HADOOP_PREFIX=%HADOOP_PREFIX:~0,-1%
-)
-
-::
-:: Setting the rest of Hadoop environment variables.
-::
-
-if not defined HADOOP_COMMON_HOME set HADOOP_COMMON_HOME=%HADOOP_PREFIX%\share\hadoop\common
-if not defined HADOOP_HDFS_HOME set HADOOP_HDFS_HOME=%HADOOP_PREFIX%\share\hadoop\hdfs
-if not defined HADOOP_MAPRED_HOME set HADOOP_MAPRED_HOME=%HADOOP_PREFIX%\share\hadoop\mapreduce
-
-::
-:: Libraries included in classpath.
-::
-
-set CP=%HADOOP_COMMON_HOME%\lib\*;%HADOOP_MAPRED_HOME%\lib\*;%HADOOP_MAPRED_HOME%\lib\*
-
-:: hadoop-auth-* jar can be located either in home or in home/lib directory, depending on the hadoop version.
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\hadoop-auth-* ^>nul 2^>^&1') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\lib\hadoop-auth-* ^>nul 2^>^&1') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_COMMON_HOME%\hadoop-common-*') do call :concat %HADOOP_COMMON_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_HDFS_HOME%\hadoop-hdfs-*') do call :concat %HADOOP_HDFS_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_MAPRED_HOME%\hadoop-mapreduce-client-common-*') do call :concat %HADOOP_MAPRED_HOME%\%%f
-for /f %%f in ('dir /B %HADOOP_MAPRED_HOME%\hadoop-mapreduce-client-core-*') do call :concat %HADOOP_MAPRED_HOME%\%%f
-
-:: Export result.
-set IGNITE_HADOOP_CLASSPATH=%CP%
-
-:: Exit.
-goto :eof
-
-:: Function that adds jar dependency into classpath.
-:concat
-    set file=%1
-    if %file:~-9,9% neq tests.jar set CP=%CP%;%1
-goto :eof

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/bin/include/hadoop-classpath.sh
----------------------------------------------------------------------
diff --git a/bin/include/hadoop-classpath.sh b/bin/include/hadoop-classpath.sh
deleted file mode 100755
index 3a0aeb1..0000000
--- a/bin/include/hadoop-classpath.sh
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-####################################################################
-#                 Hadoop class path resolver.
-#  Requires environment variables 'HADOOP_PREFIX' or 'HADOOP_HOME'
-#  to be set. If they are both undefined , tries to read them from
-#  from '/etc/default/hadoop' file. The final results are printed
-#  into standard output.
-####################################################################
-
-# Resolve constants.
-HADOOP_DEFAULTS="/etc/default/hadoop"
-HADOOP_PREFIX=${HADOOP_PREFIX:-$HADOOP_HOME}
-
-# Try get all variables from Hadoop default environment config
-# if they have not been passed into the script.
-if [[ -z "$HADOOP_PREFIX" && -f "$HADOOP_DEFAULTS" ]]; then
-    source "$HADOOP_DEFAULTS"
-fi
-
-# Return if Hadoop couldn't be found.
-[ -z "$HADOOP_PREFIX" ] && return
-
-#
-# Resolve the rest of Hadoop environment variables.
-#
-
-HADOOP_COMMON_HOME=${HADOOP_COMMON_HOME-"${HADOOP_PREFIX}/share/hadoop/common"}
-HADOOP_HDFS_HOME=${HADOOP_HDFS_HOME-"${HADOOP_PREFIX}/share/hadoop/hdfs"}
-HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME-"${HADOOP_PREFIX}/share/hadoop/mapreduce"}
-
-#
-# Calculate classpath string with required Hadoop libraries.
-#
-
-# Add all Hadoop libs.
-IGNITE_HADOOP_CLASSPATH="${HADOOP_COMMON_HOME}/lib/*${SEP}${HADOOP_MAPRED_HOME}/lib/*${SEP}${HADOOP_MAPRED_HOME}/lib/*"
-
-# Skip globbing pattern if it cannot be resolved.
-shopt -s nullglob
-
-# Add jars to classpath excluding tests.
-# hadoop-auth-* jar can be located either in home or in home/lib directory, depending on the hadoop version.
-for file in ${HADOOP_HDFS_HOME}/hadoop-hdfs-* \
-            ${HADOOP_COMMON_HOME}/hadoop-{common,auth}-* \
-            ${HADOOP_COMMON_HOME}/lib/hadoop-auth-* \
-            ${HADOOP_MAPRED_HOME}/hadoop-mapreduce-client-{common,core}-*; do
-    [[ "$file" != *-tests.jar ]] && IGNITE_HADOOP_CLASSPATH=${IGNITE_HADOOP_CLASSPATH}${SEP}${file}
-done

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/bin/include/setenv.bat
----------------------------------------------------------------------
diff --git a/bin/include/setenv.bat b/bin/include/setenv.bat
index 811bc95..7022a36 100644
--- a/bin/include/setenv.bat
+++ b/bin/include/setenv.bat
@@ -23,7 +23,7 @@
 :: in other scripts to set classpath using exported IGNITE_LIBS variable.
 ::
 
-@echo off
+:: @echo off
 
 :: USER_LIBS variable can optionally contain user's JARs/libs.
 :: set USER_LIBS=
@@ -47,11 +47,9 @@ if exist %IGNITE_HOME%\libs\ignite-hadoop set HADOOP_EDITION=1
 
 if defined USER_LIBS set IGNITE_LIBS=%USER_LIBS%;%IGNITE_LIBS%
 
-if "%HADOOP_EDITION%" == "1" call "%SCRIPTS_HOME%\include\hadoop-classpath.bat"
+FOR /F "delims=" %%i IN ('%JAVA_HOME%\bin\java.exe -cp %IGNITE_HOME%\libs\ignite-hadoop\* org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ";"' ) DO set IGNITE_HADOOP_CLASSPATH=%%i
 
-set COMMON_HOME_LIB=%HADOOP_COMMON_HOME%\lib
-
-if "%IGNITE_HADOOP_CLASSPATH%" == "" goto :eof
+if "[%IGNITE_HADOOP_CLASSPATH%]" == "[]" exit 1
 
 set IGNITE_LIBS=%IGNITE_LIBS%;%IGNITE_HADOOP_CLASSPATH%
 

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/bin/include/setenv.sh
----------------------------------------------------------------------
diff --git a/bin/include/setenv.sh b/bin/include/setenv.sh
index 2b17253..1f51b59 100755
--- a/bin/include/setenv.sh
+++ b/bin/include/setenv.sh
@@ -72,16 +72,16 @@ if [ "${USER_LIBS}" != "" ]; then
 fi
 
 if [ "${HADOOP_EDITION}" == "1" ]; then
-    #. "${SCRIPTS_HOME}"/include/hadoop-classpath.sh
-    IGNITE_HADOOP_CLASSPATH=$( "$JAVA" -cp "${IGNITE_HOME}"/libs/ignite-hadoop/'*' org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain )
+    IGNITE_HADOOP_CLASSPATH=$( "$JAVA" -cp "${IGNITE_HOME}"/libs/ignite-hadoop/'*' \
+        org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain ":" )
 
-    # TODO: DEBUG:
-    echo "IGNITE_HADOOP_CLASSPATH=[${IGNITE_HADOOP_CLASSPATH}]"
+    statusCode=${?}
 
-    if [ -z "${IGNITE_HADOOP_CLASSPATH}" ]; then
-        # Error already reported.
-        exit 1
-    else
-        IGNITE_LIBS=${IGNITE_LIBS}${SEP}${IGNITE_HADOOP_CLASSPATH}
+    if [ "${statusCode}" -ne 0 ]; then
+       exit ${statusCode}
     fi
+
+    unset statusCode
+
+    IGNITE_LIBS=${IGNITE_LIBS}${SEP}${IGNITE_HADOOP_CLASSPATH}
 fi

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index 8831137..8b6ea34 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -54,7 +54,6 @@ import org.objectweb.asm.Opcodes;
 import org.objectweb.asm.Type;
 import org.objectweb.asm.commons.Remapper;
 import org.objectweb.asm.commons.RemappingClassAdapter;
-import static org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain.*;
 
 /**
  * Class loader allowing explicitly load classes without delegation to parent class loader.
@@ -437,20 +436,6 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache
{
         return hasDot;
     }
 
-//    /**
-//     * @param name Variable name.
-//     * @param dflt Default.
-//     * @return Value.
-//     */
-//    private static String getEnv(String name, String dflt) {
-//        String res = System.getProperty(name);
-//
-//        if (F.isEmpty(res))
-//            res = System.getenv(name);
-//
-//        return F.isEmpty(res) ? dflt : res;
-//    }
-
     /**
      * @param urls URLs.
      * @return URLs.
@@ -493,7 +478,7 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache
{
                 return hadoopUrls;
 
             try {
-                hadoopUrls = getAsUrlList();
+                hadoopUrls = HadoopClasspathUtils.getAsUrlList();
             }
             catch (IOException e) {
                 throw new IgniteCheckedException(e);

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
index a83946f..0a0b422 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
@@ -1,32 +1,31 @@
 package org.apache.ignite.internal.processors.hadoop;
 
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.*;
+import java.util.List;
 
 import static java.lang.System.out;
 import static java.lang.System.err;
 
 /**
  * Main class to compose Hadoop classpath depending on the environment.
- * Note that this class should not depend on any classes or libraries except the JDK default runtime.
+ * This class is designed to be as independent of Ignite classes as possible.
+ * Please make sure to pass the path separator character as the 1st parameter to the main method.
  */
 public class HadoopClasspathMain {
     /**
+     * Main method to be executed from scripts. It prints the classpath to the standard output.
      *
-     * @param args
+     * @param args The 1st argument should be the path separator character (":" on Linux, ";" on Windows).
      */
     public static void main(String[] args) throws Exception {
-        final char sep = File.pathSeparatorChar;
+        if (args.length < 1) {
+            err.println("Path separator must be passed as the 1st argument.");
 
-        List<String> cp = getAsProcessClasspath();
+            System.exit(1);
+        }
+
+        final String sep = args[0];
+
+        List<String> cp = HadoopClasspathUtils.getAsProcessClasspath();
 
         for (String s: cp) {
             if (s != null && s.length() > 0) {
@@ -37,208 +36,4 @@ public class HadoopClasspathMain {
 
         out.println();
     }
-
-    /**
-     *
-     * @return
-     */
-    private static List<String> getAsProcessClasspath() throws IOException {
-        Collection<DirAndMask> dams = getClasspathBaseDirectories();
-
-        List<String> list = new ArrayList<>(32);
-
-        for (DirAndMask dam: dams)
-            addAsJavaProcessClasspathElement(list, dam.file, dam.mask);
-
-        // Sort the classpath elements to make it more reliable.
-        Collections.sort(list);
-
-        return list;
-    }
-
-    /**
-     *
-     * @return
-     * @throws IOException
-     */
-    public static List<URL> getAsUrlList() throws IOException {
-        Collection<DirAndMask> dams = getClasspathBaseDirectories();
-
-        List<URL> list = new ArrayList<>(32);
-
-        for (DirAndMask dam: dams)
-            // Note that this procedure does not use '*' classpath patterns,
-            // but adds all the children explicitly:
-            addUrls(list, dam.file, dam.mask);
-
-        Collections.sort(list, new Comparator<URL>() {
-            @Override public int compare(URL u1, URL u2) {
-                String s1 = String.valueOf(u1);
-                String s2 = String.valueOf(u2);
-
-                return s1.compareTo(s2);
-            }
-        });
-
-        for (URL u: list)
-            err.println(u);
-
-        return list;
-    }
-
-    /**
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws MalformedURLException If failed.
-     */
-    private static void addUrls(Collection<URL> res, File dir, final String startsWith)
throws IOException {
-        File[] files = dir.listFiles(new FilenameFilter() {
-            @Override public boolean accept(File dir, String name) {
-                return startsWith == null || name.startsWith(startsWith);
-            }
-        });
-
-        if (files == null)
-            throw new IOException("Path is not a directory: " + dir);
-
-        for (File file : files)
-            res.add(file.toURI().toURL());
-    }
-
-
-    /**
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws MalformedURLException If failed.
-     */
-    private static void addAsJavaProcessClasspathElement(Collection<String> res, File
dir, final String startsWith) throws IOException {
-        if (!dir.exists() || !dir.isDirectory() || !dir.canRead())
-            throw new IOException("Path is not an existing readable directory: " + dir);
-
-        if (startsWith == null)
-            res.add(dir.getAbsolutePath() + File.separator + '*');
-        else {
-            File[] files = dir.listFiles(new FilenameFilter() {
-                @Override public boolean accept(File dir, String name) {
-                    return name.startsWith(startsWith);
-                }
-            });
-
-            if (files == null)
-                throw new IOException("Path is not a directory: " + dir);
-
-            for (File file : files)
-                res.add(file.getAbsolutePath());
-        }
-    }
-
-    /**
-     * @return HADOOP_HOME Variable.
-     */
-    public static String hadoopHome() {
-        String prefix = getEnv("HADOOP_PREFIX", null);
-
-        return getEnv("HADOOP_HOME", prefix);
-    }
-
-    /**
-     *
-     * @return
-     * @throws FileNotFoundException
-     */
-    public static Collection<DirAndMask> getClasspathBaseDirectories() throws FileNotFoundException
{
-        final String hadoopHome = hadoopHome();
-
-        String commonHome = resolveLocation("HADOOP_COMMON_HOME", hadoopHome, "/share/hadoop/common");
-        String hdfsHome = resolveLocation("HADOOP_HDFS_HOME", hadoopHome, "/share/hadoop/hdfs");
-        String mapredHome = resolveLocation("HADOOP_MAPRED_HOME", hadoopHome, "/share/hadoop/mapreduce");
-
-        Collection<DirAndMask> c = new ArrayList<>();
-
-        c.add(new DirAndMask(new File(commonHome, "lib"), null));
-        c.add(new DirAndMask(new File(hdfsHome, "lib"), null));
-        c.add(new DirAndMask(new File(mapredHome, "lib"), null));
-
-        c.add(new DirAndMask(new File(commonHome), "hadoop-common-"));
-        c.add(new DirAndMask(new File(commonHome), "hadoop-auth-"));
-        c.add(new DirAndMask(new File(commonHome, "lib"), "hadoop-auth-"));
-
-        c.add(new DirAndMask(new File(hdfsHome), "hadoop-hdfs-"));
-
-        c.add(new DirAndMask(new File(mapredHome), "hadoop-mapreduce-client-common"));
-        c.add(new DirAndMask(new File(mapredHome), "hadoop-mapreduce-client-core"));
-
-        return c;
-    }
-
-    public static class DirAndMask {
-        DirAndMask(File f, String m) {
-            file = f;
-            mask = m;
-        }
-        public final File file;
-        public final String mask;
-    }
-
-    /**
-     * Checks if the variable is empty.
-     *
-     * @param envVarName Environment variable name.
-     * @param hadoopHome The current value.
-     * @param expHadoopHomeRelativePath The path relative to Hadoop home.
-     * @throws FileNotFoundException If the value is empty.
-     */
-    private static String resolveLocation(String envVarName, String hadoopHome,
-        String expHadoopHomeRelativePath) throws FileNotFoundException {
-        String val = getEnv(envVarName, null);
-
-        if (val == null) {
-            // The env. variable is not set. Try to resolve the location relative HADOOP_HOME:
-            if (!isExistingDirectory(hadoopHome))
-                throw new FileNotFoundException("Failed to resolve Hadoop installation location.
" +
-                        envVarName + " or HADOOP_HOME environment variable should be set.");
-
-            val = hadoopHome + expHadoopHomeRelativePath;
-        }
-
-        if (!isExistingDirectory(val))
-            throw new FileNotFoundException("Failed to resolve Hadoop location. [path=" +
val + ']');
-
-        err.println(envVarName + " resolved to " + val);
-
-        return val;
-    }
-
-    /**
-     * Note that this method does not treat empty value as an absent value.
-     *
-     * @param name Variable name.
-     * @param dflt Default.
-     * @return Value.
-     */
-    private static String getEnv(String name, String dflt) {
-        String res = System.getProperty(name);
-
-        if (res == null)
-            res = System.getenv(name);
-
-        return res == null ? dflt : res;
-    }
-
-    /**
-     * Answers if the given path denotes existing directory.
-     *
-     * @param path The directory path.
-     * @return 'true' if the given path denotes an existing directory.
-     */
-    private static boolean isExistingDirectory(String path) {
-        if (path == null)
-            return false;
-
-        Path p = Paths.get(path);
-
-        return Files.exists(p) && Files.isDirectory(p) && Files.isReadable(p);
-    }
 }

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
new file mode 100644
index 0000000..c4a2fcc
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -0,0 +1,232 @@
+package org.apache.ignite.internal.processors.hadoop;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+import static java.lang.System.err;
+
+/**
+ * Hadoop classpath utilities.
+ */
+public class HadoopClasspathUtils {
+    /**
+     * Gets Hadoop class path as list of classpath elements for process.
+     *
+     * @return List of the class path elements.
+     * @throws IOException On error.
+     */
+    static List<String> getAsProcessClasspath() throws IOException {
+        Collection<DirAndMask> dams = getClasspathBaseDirectories();
+
+        List<String> list = new ArrayList<>(32);
+
+        for (DirAndMask dam: dams)
+            addAsJavaProcessClasspathElement(list, dam.dir, dam.mask);
+
+        return list;
+    }
+
+    /**
+     * Gets Hadoop class path as a list of URLs (for in-process class loader usage).
+     *
+     * @return List of class path URLs.
+     * @throws IOException On error.
+     */
+    public static List<URL> getAsUrlList() throws IOException {
+        Collection<DirAndMask> dams = getClasspathBaseDirectories();
+
+        List<URL> list = new ArrayList<>(32);
+
+        for (DirAndMask dam: dams)
+            // Note that this procedure does not use '*' classpath patterns,
+            // but adds all the children explicitly:
+            addUrls(list, dam.dir, dam.mask);
+
+        return list;
+    }
+
+    /**
+     * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
+     *
+     * @param res Result.
+     * @param dir Directory.
+     * @param startsWith Starts with prefix.
+     * @throws MalformedURLException If failed.
+     */
+    private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws IOException {
+        File[] files = dir.listFiles(new FilenameFilter() {
+            @Override public boolean accept(File dir, String name) {
+                return startsWith == null || name.startsWith(startsWith);
+            }
+        });
+
+        if (files == null)
+            throw new IOException("Path is not a directory. [dir=" + dir + ']');
+
+        for (File file : files)
+            res.add(file.toURI().toURL());
+    }
+
+
+    /**
+     * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
+     *
+     * @param res Result.
+     * @param dir Directory.
+     * @param startsWith Starts with prefix.
+     * @throws MalformedURLException If failed.
+     */
+    private static void addAsJavaProcessClasspathElement(Collection<String> res, File dir, final String startsWith) throws IOException {
+        if (!dir.exists() || !dir.isDirectory() || !dir.canRead())
+            throw new IOException("Path is not an existing readable directory. [dir=" + dir + ']');
+
+        if (startsWith == null)
+            res.add(dir.getAbsolutePath() + File.separator + '*');
+        else {
+            File[] files = dir.listFiles(new FilenameFilter() {
+                @Override public boolean accept(File dir, String name) {
+                    return name.startsWith(startsWith);
+                }
+            });
+
+            if (files == null)
+                throw new IOException("Path is not a directory. [" + dir + ']');
+
+            for (File file : files)
+                res.add(file.getAbsolutePath());
+        }
+    }
+
+    /**
+     * @return HADOOP_HOME Variable.
+     */
+    public static String hadoopHome() {
+        String prefix = getEnv("HADOOP_PREFIX", null);
+
+        return getEnv("HADOOP_HOME", prefix);
+    }
+
+    /**
+     * Gets base directories to discover classpath elements in.
+     *
+     * @return Collection of directory and mask pairs.
+     * @throws FileNotFoundException if a mandatory classpath location is not found.
+     */
+    private static Collection<DirAndMask> getClasspathBaseDirectories() throws FileNotFoundException {
+        final String hadoopHome = hadoopHome();
+
+        String commonHome = resolveLocation("HADOOP_COMMON_HOME", hadoopHome, "/share/hadoop/common");
+        String hdfsHome = resolveLocation("HADOOP_HDFS_HOME", hadoopHome, "/share/hadoop/hdfs");
+        String mapredHome = resolveLocation("HADOOP_MAPRED_HOME", hadoopHome, "/share/hadoop/mapreduce");
+
+        Collection<DirAndMask> c = new ArrayList<>();
+
+        c.add(new DirAndMask(new File(commonHome, "lib"), null));
+        c.add(new DirAndMask(new File(hdfsHome, "lib"), null));
+        c.add(new DirAndMask(new File(mapredHome, "lib"), null));
+
+        c.add(new DirAndMask(new File(commonHome), "hadoop-common-"));
+        c.add(new DirAndMask(new File(commonHome), "hadoop-auth-"));
+        c.add(new DirAndMask(new File(commonHome, "lib"), "hadoop-auth-"));
+
+        c.add(new DirAndMask(new File(hdfsHome), "hadoop-hdfs-"));
+
+        c.add(new DirAndMask(new File(mapredHome), "hadoop-mapreduce-client-common"));
+        c.add(new DirAndMask(new File(mapredHome), "hadoop-mapreduce-client-core"));
+
+        return c;
+    }
+
+    /**
+     * Simple pair-like structure to hold directory name and a mask assigned to it.
+     */
+    public static class DirAndMask {
+        /**
+         * Constructor.
+         *
+         * @param dir The directory.
+         * @param mask The mask.
+         */
+        DirAndMask(File dir, String mask) {
+            this.dir = dir;
+            this.mask = mask;
+        }
+
+        /** The path. */
+        public final File dir;
+
+        /** The mask. */
+        public final String mask;
+    }
+
+    /**
+     * Checks if the variable is empty.
+     *
+     * @param envVarName Environment variable name.
+     * @param hadoopHome The current value.
+     * @param expHadoopHomeRelativePath The path relative to Hadoop home.
+     * @throws FileNotFoundException If the value is empty.
+     */
+    private static String resolveLocation(String envVarName, String hadoopHome,
+                                          String expHadoopHomeRelativePath) throws FileNotFoundException {
+        String val = getEnv(envVarName, null);
+
+        if (val == null) {
+            // The env. variable is not set. Try to resolve the location relative HADOOP_HOME:
+            if (!isExistingDirectory(hadoopHome))
+                throw new FileNotFoundException("Failed to resolve Hadoop installation location. " +
+                        envVarName + " or HADOOP_HOME environment variable should be set.");
+
+            val = hadoopHome + expHadoopHomeRelativePath;
+        }
+
+        if (!isExistingDirectory(val))
+            throw new FileNotFoundException("Failed to resolve Hadoop location. [path=" + val + ']');
+
+        // Print diagnostic output:
+        err.println(envVarName + " resolved to " + val);
+
+        return val;
+    }
+
+    /**
+     * Note that this method does not treat empty value as an absent value.
+     *
+     * @param name Variable name.
+     * @param dflt Default.
+     * @return Value.
+     */
+    private static String getEnv(String name, String dflt) {
+        String res = System.getProperty(name);
+
+        if (res == null)
+            res = System.getenv(name);
+
+        return res == null ? dflt : res;
+    }
+
+    /**
+     * Answers if the given path denotes existing directory.
+     *
+     * @param path The directory path.
+     * @return 'true' if the given path denotes an existing directory.
+     */
+    private static boolean isExistingDirectory(String path) {
+        if (path == null)
+            return false;
+
+        Path p = Paths.get(path);
+
+        return Files.exists(p) && Files.isDirectory(p) && Files.isReadable(p);
+    }
+}

http://git-wip-us.apache.org/repos/asf/ignite/blob/f0d08f86/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
index a502c3e..98e1fc5 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
@@ -33,9 +33,6 @@ import org.apache.ignite.internal.util.tostring.GridToStringExclude;
 import org.apache.ignite.internal.util.typedef.internal.S;
 import org.apache.ignite.internal.util.typedef.internal.U;
 
-import static org.apache.ignite.internal.processors.hadoop.HadoopClassLoader.hadoopUrls;
-import static org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain.hadoopHome;
-
 /**
  * Hadoop processor.
  */
@@ -74,13 +71,13 @@ public class HadoopProcessor extends HadoopProcessorAdapter {
 
         validate(cfg);
 
-        if (hadoopHome() != null)
-            U.quietAndInfo(log, "HADOOP_HOME is set to " + hadoopHome());
+        if (HadoopClasspathUtils.hadoopHome() != null)
+            U.quietAndInfo(log, "HADOOP_HOME is set to " + HadoopClasspathUtils.hadoopHome());
 
         boolean ok = false;
 
         try { // Check for Hadoop installation.
-            hadoopUrls();
+            HadoopClassLoader.hadoopUrls();
 
             ok = true;
         }


Mime
View raw message