ignite-commits mailing list archives

From voze...@apache.org
Subject [13/27] ignite git commit: IGNITE-3184: Final review.
Date Fri, 01 Jul 2016 12:10:56 GMT
IGNITE-3184: Final review.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/52841f90
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/52841f90
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/52841f90

Branch: refs/heads/ignite-3185
Commit: 52841f90a132d5c91a66ae171cb0169663f04049
Parents: 9cc1607
Author: vozerov-gridgain <vozerov@gridgain.com>
Authored: Fri Jun 24 17:22:43 2016 +0300
Committer: vozerov-gridgain <vozerov@gridgain.com>
Committed: Fri Jun 24 17:22:43 2016 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopClassLoader.java    |   2 +-
 .../processors/hadoop/HadoopClasspathMain.java  |  16 +-
 .../processors/hadoop/HadoopClasspathUtils.java | 242 +++++++------------
 .../processors/hadoop/HadoopLocations.java      |  78 ++++++
 .../processors/hadoop/HadoopProcessor.java      |  13 +-
 5 files changed, 181 insertions(+), 170 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/52841f90/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
index c0251eb..1c844c4 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClassLoader.java
@@ -498,7 +498,7 @@ public class HadoopClassLoader extends URLClassLoader implements ClassCache {
                 return hadoopUrls;
 
             try {
-                hadoopUrls = HadoopClasspathUtils.getAsUrlList();
+                hadoopUrls = HadoopClasspathUtils.classpathUrls();
             }
             catch (IOException e) {
                 throw new IgniteCheckedException("Failed to resolve Hadoop JAR locations.", e);
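
Usage note (illustrative, not part of the commit): the renamed classpathUrls() method is public, so the wiring above can be reproduced standalone. The wrapper class below is hypothetical and assumes only the classpathUrls() signature shown in this patch.

import java.net.URL;
import java.net.URLClassLoader;
import java.util.List;

import org.apache.ignite.internal.processors.hadoop.HadoopClasspathUtils;

public class HadoopClasspathUrlsSketch {
    public static void main(String[] args) throws Exception {
        // classpathUrls() resolves the Hadoop directories and returns an explicit
        // URL per matching JAR (no '*' classpath patterns).
        List<URL> urls = HadoopClasspathUtils.classpathUrls();

        // HadoopClassLoader extends URLClassLoader, so the same URLs can back a plain one.
        try (URLClassLoader ldr = new URLClassLoader(urls.toArray(new URL[urls.size()]))) {
            System.out.println("Resolved " + urls.size() + " Hadoop classpath entries.");
        }
    }
}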

http://git-wip-us.apache.org/repos/asf/ignite/blob/52841f90/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
index b1422d4..5279b7d 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathMain.java
@@ -17,8 +17,6 @@
 
 package org.apache.ignite.internal.processors.hadoop;
 
-import java.util.List;
-
 /**
  * Main class to compose Hadoop classpath depending on the environment.
  * This class is designed to be as independent of Ignite classes as possible.
@@ -32,17 +30,15 @@ public class HadoopClasspathMain {
      */
     public static void main(String[] args) throws Exception {
         if (args.length < 1)
-            throw new IllegalArgumentException("Path separator must be passed as the 1st argument.");
+            throw new IllegalArgumentException("Path separator must be passed as the first argument.");
 
-        final String sep = args[0];
+        String separator = args[0];
 
-        List<String> cp = HadoopClasspathUtils.getAsProcessClasspath();
+        StringBuilder sb = new StringBuilder();
 
-        for (String s: cp) {
-            System.out.print(s);
-            System.out.print(sep);
-        }
+        for (String path : HadoopClasspathUtils.classpathForJavaProcess())
+            sb.append(path).append(separator);
 
-        System.out.println();
+        System.out.println(sb);
     }
 }
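
Usage note (illustrative, not part of the commit): the class expects the platform path separator as its single argument. A minimal in-process invocation, with a hypothetical wrapper class:

import java.io.File;

import org.apache.ignite.internal.processors.hadoop.HadoopClasspathMain;

public class ClasspathMainSketch {
    public static void main(String[] args) throws Exception {
        // File.pathSeparator is ':' on Unix and ';' on Windows; HadoopClasspathMain
        // joins every resolved classpath element with it and prints the result.
        HadoopClasspathMain.main(new String[] { File.pathSeparator });
    }
}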

http://git-wip-us.apache.org/repos/asf/ignite/blob/52841f90/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
index bb7ccd3..a934f61 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@ -39,15 +39,19 @@ public class HadoopClasspathUtils {
      * @return List of the class path elements.
      * @throws IOException If failed.
      */
-    static List<String> getAsProcessClasspath() throws IOException {
-        Collection<DirAndMask> dams = getClasspathBaseDirectories();
+    public static List<String> classpathForJavaProcess() throws IOException {
+        List<String> res = new ArrayList<>();
 
-        List<String> list = new ArrayList<>(32);
-
-        for (DirAndMask dam: dams)
-            addAsJavaProcessClasspathElement(list, dam.dir, dam.mask);
+        for (final SearchDirectory dir : classpathDirectories()) {
+            if (dir.hasFilter()) {
+                for (File file : dir.files())
+                    res.add(file.getAbsolutePath());
+            }
+            else
+                res.add(dir.dir.getAbsolutePath() + File.separator + '*');
+        }
 
-        return list;
+        return res;
     }
 
     /**
@@ -56,113 +60,21 @@ public class HadoopClasspathUtils {
      * @return List of class path URLs.
      * @throws IOException If failed.
      */
-    public static List<URL> getAsUrlList() throws IOException {
-        Collection<DirAndMask> dams = getClasspathBaseDirectories();
-
-        List<URL> list = new ArrayList<>(32);
-
-        for (DirAndMask dam: dams)
-            // Note that this procedure does not use '*' classpath patterns,
-            // but adds all the children explicitly:
-            addUrls(list, dam.dir, dam.mask);
-
-        return list;
-    }
-
-    /**
-     * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
-     *
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws IOException If failed.
-     */
-    private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws IOException {
-        File[] files = dir.listFiles(new FilenameFilter() {
-            @Override public boolean accept(File dir, String name) {
-                return startsWith == null || name.startsWith(startsWith);
-            }
-        });
-
-        if (files == null)
-            throw new IOException("Path is not a directory. [dir=" + dir + ']');
-
-        for (File file : files) {
-            try {
-                res.add(file.toURI().toURL());
-            }
-            catch (MalformedURLException e) {
-                throw new IOException("Failed to convert file path to URL: " + file.getPath());
-            }
-        }
-    }
-
-    /**
-     * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
-     *
-     * @param res Result.
-     * @param dir Directory.
-     * @param startsWith Starts with prefix.
-     * @throws IOException If failed.
-     */
-    private static void addAsJavaProcessClasspathElement(Collection<String> res, File dir, final String startsWith)
-        throws IOException {
-        if (!dir.exists() || !dir.isDirectory() || !dir.canRead())
-            throw new IOException("Path is not an existing readable directory. [dir=" + dir + ']');
+    public static List<URL> classpathUrls() throws IOException {
+        List<URL> res = new ArrayList<>();
 
-        if (startsWith == null)
-            res.add(dir.getAbsolutePath() + File.separator + '*');
-        else {
-            File[] files = dir.listFiles(new FilenameFilter() {
-                @Override public boolean accept(File dir, String name) {
-                    return name.startsWith(startsWith);
+        for (SearchDirectory dir : classpathDirectories()) {
+            for (File file : dir.files()) {
+                try {
+                    res.add(file.toURI().toURL());
                 }
-            });
-
-            if (files == null)
-                throw new IOException("Path is not a directory. [" + dir + ']');
-
-            for (File file : files)
-                res.add(file.getAbsolutePath());
+                catch (MalformedURLException e) {
+                    throw new IOException("Failed to convert file path to URL: " + file.getPath());
+                }
+            }
         }
-    }
 
-    /**
-     * @return HADOOP_HOME Variable.
-     */
-    private static String hadoopHome() {
-        String prefix = getEnv("HADOOP_PREFIX", null);
-
-        return getEnv("HADOOP_HOME", prefix);
-    }
-
-    /**
-     * Simple structure to hold Hadoop directory locations.
-     */
-    public static class HadoopLocations {
-        /** HADOOP_HOME, may be null. */
-        public final String home;
-        /** HADOOP_COMMON_HOME */
-        public final String common;
-        /** HADOOP_HDFS_HOME */
-        public final String hdfs;
-        /** HADOOP_MAPRED_HOME */
-        public final String mapred;
-
-        /**
-         * Constructor.
-         *
-         * @param home HADOOP_HOME
-         * @param common HADOOP_COMMON_HOME
-         * @param hdfs HADOOP_HDFS_HOME
-         * @param mapred HADOOP_MAPRED_HOME
-         */
-        HadoopLocations(String home, String common, String hdfs, String mapred) {
-            this.home = home;
-            this.common = common;
-            this.hdfs = hdfs;
-            this.mapred = mapred;
-        }
+        return res;
     }
 
     /**
@@ -170,8 +82,8 @@ public class HadoopClasspathUtils {
      *
      * @return The Hadoop locations, never null.
      */
-    public static HadoopLocations getHadoopLocations() throws IOException {
-        final String hadoopHome = hadoopHome();
+    public static HadoopLocations hadoopLocations() throws IOException {
+        final String hadoopHome = systemOrEnv("HADOOP_HOME", systemOrEnv("HADOOP_PREFIX", null));
 
         String commonHome = resolveLocation("HADOOP_COMMON_HOME", hadoopHome, "/share/hadoop/common");
         String hdfsHome = resolveLocation("HADOOP_HDFS_HOME", hadoopHome, "/share/hadoop/hdfs");
@@ -186,46 +98,24 @@ public class HadoopClasspathUtils {
      * @return Collection of directory and mask pairs.
      * @throws IOException if a mandatory classpath location is not found.
      */
-    private static Collection<DirAndMask> getClasspathBaseDirectories() throws IOException {
-        HadoopLocations loc = getHadoopLocations();
+    private static Collection<SearchDirectory> classpathDirectories() throws IOException {
+        HadoopLocations loc = hadoopLocations();
 
-        Collection<DirAndMask> c = new ArrayList<>();
+        Collection<SearchDirectory> res = new ArrayList<>();
 
-        c.add(new DirAndMask(new File(loc.common, "lib"), null));
-        c.add(new DirAndMask(new File(loc.hdfs, "lib"), null));
-        c.add(new DirAndMask(new File(loc.mapred, "lib"), null));
+        res.add(new SearchDirectory(new File(loc.commonHome(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.hdfsHome(), "lib"), null));
+        res.add(new SearchDirectory(new File(loc.mapredHome(), "lib"), null));
 
-        c.add(new DirAndMask(new File(loc.common), "hadoop-common-"));
-        c.add(new DirAndMask(new File(loc.common), "hadoop-auth-"));
+        res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-common-"));
+        res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-auth-"));
 
-        c.add(new DirAndMask(new File(loc.hdfs), "hadoop-hdfs-"));
+        res.add(new SearchDirectory(new File(loc.hdfsHome()), "hadoop-hdfs-"));
 
-        c.add(new DirAndMask(new File(loc.mapred), "hadoop-mapreduce-client-common"));
-        c.add(new DirAndMask(new File(loc.mapred), "hadoop-mapreduce-client-core"));
+        res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-common"));
+        res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-core"));
 
-        return c;
-    }
-
-    /**
-     * Simple pair-like structure to hold directory name and a mask assigned to it.
-     */
-    public static class DirAndMask {
-        /**
-         * Constructor.
-         *
-         * @param dir The directory.
-         * @param mask The mask.
-         */
-        DirAndMask(File dir, String mask) {
-            this.dir = dir;
-            this.mask = mask;
-        }
-
-        /** The path. */
-        public final File dir;
-
-        /** The mask. */
-        public final String mask;
+        return res;
     }
 
     /**
@@ -238,18 +128,18 @@ public class HadoopClasspathUtils {
      */
     private static String resolveLocation(String envVarName, String hadoopHome, String expHadoopHomeRelativePath)
         throws IOException {
-        String val = getEnv(envVarName, null);
+        String val = systemOrEnv(envVarName, null);
 
         if (val == null) {
             // The env. variable is not set. Try to resolve the location relative to HADOOP_HOME:
-            if (!isExistingDirectory(hadoopHome))
+            if (!directoryExists(hadoopHome))
                 throw new IOException("Failed to resolve Hadoop installation location. " +
                     envVarName + " or HADOOP_HOME environment variable should be set.");
 
             val = hadoopHome + expHadoopHomeRelativePath;
         }
 
-        if (!isExistingDirectory(val))
+        if (!directoryExists(val))
             throw new IOException("Failed to resolve Hadoop location [path=" + val + ']');
 
         return val;
@@ -262,7 +152,7 @@ public class HadoopClasspathUtils {
      * @param dflt Default.
      * @return Value.
      */
-    private static String getEnv(String name, String dflt) {
+    private static String systemOrEnv(String name, String dflt) {
         String res = System.getProperty(name);
 
         if (res == null)
@@ -275,9 +165,9 @@ public class HadoopClasspathUtils {
      * Answers if the given path denotes existing directory.
      *
      * @param path The directory path.
-     * @return 'true' if the given path denotes an existing directory.
+     * @return {@code True} if the given path denotes an existing directory.
      */
-    private static boolean isExistingDirectory(String path) {
+    private static boolean directoryExists(String path) {
         if (path == null)
             return false;
 
@@ -285,4 +175,52 @@ public class HadoopClasspathUtils {
 
         return Files.exists(p) && Files.isDirectory(p) && Files.isReadable(p);
     }
+
+    /**
+     * Simple structure to hold a search directory and an optional file name filter.
+     */
+    public static class SearchDirectory {
+        /** Directory to search in. */
+        private final File dir;
+
+        /** File name filter, may be null. */
+        private final String filter;
+
+        /**
+         * Constructor.
+         *
+         * @param dir Directory.
+         * @param filter Filter.
+         */
+        private SearchDirectory(File dir, String filter) throws IOException {
+            this.dir = dir;
+            this.filter = filter;
+
+            if (!directoryExists(dir.getAbsolutePath()))
+                throw new IOException("Directory cannot be read: " + dir.getAbsolutePath());
+        }
+
+        /**
+         * @return Child files.
+         */
+        private File[] files() throws IOException {
+            File[] files = dir.listFiles(new FilenameFilter() {
+                @Override public boolean accept(File dir, String name) {
+                    return filter == null || name.startsWith(filter);
+                }
+            });
+
+            if (files == null)
+                throw new IOException("Path is not a directory. [dir=" + dir + ']');
+
+            return files;
+        }
+
+        /**
+         * @return {@code True} if filter exists.
+         */
+        private boolean hasFilter() {
+            return filter != null;
+        }
+    }
 }
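
Review note (illustrative, not part of the commit): the systemOrEnv() rename makes the lookup order explicit: a system property wins over the environment variable of the same name, and HADOOP_PREFIX is only a fallback for HADOOP_HOME. A sketch of that behavior; the wrapper class and the /opt/hadoop path are assumptions.

import org.apache.ignite.internal.processors.hadoop.HadoopClasspathUtils;
import org.apache.ignite.internal.processors.hadoop.HadoopLocations;

public class LocationLookupSketch {
    public static void main(String[] args) throws Exception {
        // A -DHADOOP_HOME system property takes precedence over the HADOOP_HOME
        // environment variable; HADOOP_PREFIX is consulted only as a fallback.
        System.setProperty("HADOOP_HOME", "/opt/hadoop");

        // Throws IOException if any resolved directory does not exist or cannot be read.
        HadoopLocations loc = HadoopClasspathUtils.hadoopLocations();

        System.out.println("HADOOP_COMMON_HOME -> " + loc.commonHome());
    }
}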

http://git-wip-us.apache.org/repos/asf/ignite/blob/52841f90/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
new file mode 100644
index 0000000..b3e2d12
--- /dev/null
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopLocations.java
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ignite.internal.processors.hadoop;
+
+/**
+ * Simple structure to hold Hadoop directory locations.
+ */
+public class HadoopLocations {
+    /** Hadoop home. */
+    private final String home;
+
+    /** Common home. */
+    private final String commonHome;
+
+    /** HDFS home. */
+    private final String hdfsHome;
+
+    /** Mapred home. */
+    private final String mapredHome;
+
+    /**
+     * Constructor.
+     *
+     * @param home Hadoop home.
+     * @param commonHome Common home.
+     * @param hdfsHome HDFS home.
+     * @param mapredHome Mapred home.
+     */
+    public HadoopLocations(String home, String commonHome, String hdfsHome, String mapredHome)
{
+        this.home = home;
+        this.commonHome = commonHome;
+        this.hdfsHome = hdfsHome;
+        this.mapredHome = mapredHome;
+    }
+
+    /**
+     * @return Hadoop home.
+     */
+    public String home() {
+        return home;
+    }
+
+    /**
+     * @return Common home.
+     */
+    public String commonHome() {
+        return commonHome;
+    }
+
+    /**
+     * @return HDFS home.
+     */
+    public String hdfsHome() {
+        return hdfsHome;
+    }
+
+    /**
+     * @return Mapred home.
+     */
+    public String mapredHome() {
+        return mapredHome;
+    }
+}
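
Usage note (illustrative, not part of the commit): HadoopLocations is a plain immutable holder. Constructing one by hand mirrors the fallback layout that resolveLocation() derives from HADOOP_HOME when the per-component variables are unset; the /opt/hadoop root and the mapreduce subpath below are assumptions for illustration.

import org.apache.ignite.internal.processors.hadoop.HadoopLocations;

public class HadoopLocationsSketch {
    public static void main(String[] args) {
        // Illustrative values only; real instances come from HadoopClasspathUtils.hadoopLocations().
        HadoopLocations loc = new HadoopLocations(
            "/opt/hadoop",                          // HADOOP_HOME
            "/opt/hadoop/share/hadoop/common",      // HADOOP_COMMON_HOME fallback
            "/opt/hadoop/share/hadoop/hdfs",        // HADOOP_HDFS_HOME fallback
            "/opt/hadoop/share/hadoop/mapreduce");  // assumed HADOOP_MAPRED_HOME fallback

        System.out.println("HDFS home: " + loc.hdfsHome());
    }
}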

http://git-wip-us.apache.org/repos/asf/ignite/blob/52841f90/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
----------------------------------------------------------------------
diff --git a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
index c4cd143..c45953e 100644
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopProcessor.java
@@ -33,7 +33,6 @@ import org.apache.ignite.internal.processors.hadoop.taskexecutor.HadoopEmbeddedT
 import org.apache.ignite.internal.util.tostring.GridToStringExclude;
 import org.apache.ignite.internal.util.typedef.internal.S;
 import org.apache.ignite.internal.util.typedef.internal.U;
-import org.apache.ignite.internal.processors.hadoop.HadoopClasspathUtils.HadoopLocations;
 
 /**
  * Hadoop processor.
@@ -74,14 +73,14 @@ public class HadoopProcessor extends HadoopProcessorAdapter {
         validate(cfg);
 
         try {
-            HadoopLocations loc = HadoopClasspathUtils.getHadoopLocations();
+            HadoopLocations loc = HadoopClasspathUtils.hadoopLocations();
 
-            if (loc.home != null)
-                U.quietAndInfo(log, "HADOOP_HOME is set to " + loc.home);
+            if (loc.home() != null)
+                U.quietAndInfo(log, "HADOOP_HOME is set to " + loc.home());
 
-            U.quietAndInfo(log, "HADOOP_COMMON_HOME is set to " + loc.common);
-            U.quietAndInfo(log, "HADOOP_HDFS_HOME is set to " + loc.hdfs);
-            U.quietAndInfo(log, "HADOOP_MAPRED_HOME is set to " + loc.mapred);
+            U.quietAndInfo(log, "HADOOP_COMMON_HOME is set to " + loc.commonHome());
+            U.quietAndInfo(log, "HADOOP_HDFS_HOME is set to " + loc.hdfsHome());
+            U.quietAndInfo(log, "HADOOP_MAPRED_HOME is set to " + loc.mapredHome());
         }
         catch (IOException ioe) {
             throw new IgniteCheckedException(ioe);

