ignite-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From voze...@apache.org
Subject [15/27] ignite git commit: IGNITE-3185: implemented the logic.
Date Fri, 01 Jul 2016 12:10:58 GMT
IGNITE-3185: implemented the logic.


Project: http://git-wip-us.apache.org/repos/asf/ignite/repo
Commit: http://git-wip-us.apache.org/repos/asf/ignite/commit/04f24348
Tree: http://git-wip-us.apache.org/repos/asf/ignite/tree/04f24348
Diff: http://git-wip-us.apache.org/repos/asf/ignite/diff/04f24348

Branch: refs/heads/ignite-3185
Commit: 04f24348bab2ad4f2a7ac45fbfae0d14236959d4
Parents: c401709 52841f9
Author: iveselovskiy <iveselovskiy@gridgain.com>
Authored: Fri Jun 24 19:20:11 2016 +0300
Committer: iveselovskiy <iveselovskiy@gridgain.com>
Committed: Fri Jun 24 19:20:11 2016 +0300

----------------------------------------------------------------------
 .../processors/hadoop/HadoopClassLoader.java    |   2 +-
 .../processors/hadoop/HadoopClasspathMain.java  |  16 +-
 .../processors/hadoop/HadoopClasspathUtils.java | 288 +++++++++++--------
 .../processors/hadoop/HadoopLocations.java      |  78 +++++
 .../processors/hadoop/HadoopProcessor.java      |  13 +-
 5 files changed, 259 insertions(+), 138 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ignite/blob/04f24348/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
----------------------------------------------------------------------
diff --cc modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
index acf19bd,a934f61..1a62a88
--- a/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
+++ b/modules/hadoop/src/main/java/org/apache/ignite/internal/processors/hadoop/HadoopClasspathUtils.java
@@@ -56,237 -60,36 +60,206 @@@ public class HadoopClasspathUtils 
       * @return List of class path URLs.
       * @throws IOException If failed.
       */
-     public static List<URL> getAsUrlList() throws IOException {
-         Collection<DirAndMask> dams = getClasspathBaseDirectories();
+     public static List<URL> classpathUrls() throws IOException {
+         List<URL> res = new ArrayList<>();
  
-         List<URL> list = new ArrayList<>(32);
- 
-         for (DirAndMask dam: dams)
-             // Note that this procedure does not use '*' classpath patterns,
-             // but adds all the children explicitly:
-             addUrls(list, dam.dir, dam.mask);
- 
-         return list;
-     }
- 
-     /**
-      * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
-      *
-      * @param res Result.
-      * @param dir Directory.
-      * @param startsWith Starts with prefix.
-      * @throws IOException If failed.
-      */
-     private static void addUrls(Collection<URL> res, File dir, final String startsWith) throws IOException {
-         File[] files = dir.listFiles(new FilenameFilter() {
-             @Override public boolean accept(File dir, String name) {
-                 return startsWith == null || name.startsWith(startsWith);
-             }
-         });
- 
-         if (files == null)
-             throw new IOException("Path is not a directory. [dir=" + dir + ']');
- 
-         for (File file : files) {
-             try {
-                 res.add(file.toURI().toURL());
-             }
-             catch (MalformedURLException e) {
-                 throw new IOException("Failed to convert file path to URL: " + file.getPath());
+         for (SearchDirectory dir : classpathDirectories()) {
+             for (File file : dir.files()) {
+                 try {
+                     res.add(file.toURI().toURL());
+                 }
+                 catch (MalformedURLException e) {
+                     throw new IOException("Failed to convert file path to URL: " + file.getPath());
+                 }
              }
          }
-     }
- 
-     /**
-      * Discovers classpath entries in specified directory and adds them as URLs to the given {@code res} collection.
-      *
-      * @param res Result.
-      * @param dir Directory.
-      * @param startsWith Starts with prefix.
-      * @throws IOException If failed.
-      */
-     private static void addAsJavaProcessClasspathElement(Collection<String> res, File dir, final String startsWith)
-         throws IOException {
-         if (!dir.exists() || !dir.isDirectory() || !dir.canRead())
-             throw new IOException("Path is not an existing readable directory. [dir=" + dir + ']');
  
-         if (startsWith == null)
-             res.add(dir.getAbsolutePath() + File.separator + '*');
-         else {
-             File[] files = dir.listFiles(new FilenameFilter() {
-                 @Override public boolean accept(File dir, String name) {
-                     return name.startsWith(startsWith);
-                 }
-             });
- 
-             if (files == null)
-                 throw new IOException("Path is not a directory. [" + dir + ']');
- 
-             for (File file : files)
-                 res.add(file.getAbsolutePath());
-         }
+         return res;
      }
  
-     /**
-      * @return HADOOP_HOME Variable.
-      */
-     private static String hadoopHome() {
-         String prefix = getEnv("HADOOP_PREFIX", null);
- 
-         return getEnv("HADOOP_HOME", prefix);
-     }
++//    /**
++//     * @return HADOOP_HOME Variable.
++//     */
++//    private static String hadoopHome() {
++//        String prefix = getEnv("HADOOP_PREFIX", null);
++//
++//        return getEnv("HADOOP_HOME", prefix);
++//    }
++
++//    /**
++//     * Simple structure to hold Hadoop directory locations.
++//     */
++//    public static class HadoopLocations {
++//        /** HADOOP_HOME, may be null. */
++//        public final String home;
++//        /** HADOOP_COMMON_HOME */
++//        public final String common;
++//        /** HADOOP_HDFS_HOME */
++//        public final String hdfs;
++//        /** HADOOP_MAPRED_HOME */
++//        public final String mapred;
++//
++//        /**
++//         * Constructor.
++//         *
++//         * @param home HADOOP_HOME
++//         * @param common HADOOP_COMMON_HOME
++//         * @param hdfs HADOOP_HDFS_HOME
++//         * @param mapred HADOOP_MAPRED_HOME
++//         */
++//        HadoopLocations(String home, String common, String hdfs, String mapred) {
++//            this.home = home;
++//            this.common = common;
++//            this.hdfs = hdfs;
++//            this.mapred = mapred;
++//        }
++//    }
 +
 +    /**
 +     * Simple structure to hold Hadoop directory locations.
 +     */
 +    public static class HadoopLocations {
 +        /** HADOOP_HOME, may be null. */
 +        public final String home;
 +        /** HADOOP_COMMON_HOME */
 +        public final String common;
 +        /** HADOOP_HDFS_HOME */
 +        public final String hdfs;
 +        /** HADOOP_MAPRED_HOME */
 +        public final String mapred;
 +
 +        /**
 +         * Constructor.
 +         *
 +         * @param home HADOOP_HOME
 +         * @param common HADOOP_COMMON_HOME
 +         * @param hdfs HADOOP_HDFS_HOME
 +         * @param mapred HADOOP_MAPRED_HOME
 +         */
 +        HadoopLocations(String home, String common, String hdfs, String mapred) {
 +            this.home = home;
 +            this.common = common;
 +            this.hdfs = hdfs;
 +            this.mapred = mapred;
 +        }
 +
 +        /**
 +         * Answers if all the base directories are defined.
 +         *
 +         * @return 'true' if "common", "hdfs", and "mapred" directories are defined.
 +         */
 +        public boolean isDefined() {
 +            return common != null && hdfs != null && mapred != null;
 +        }
 +
 +        /**
 +         * Answers if all the base directories exist.
 +         *
 +         * @return 'true' if "common", "hdfs", and "mapred" directories do exist.
 +         */
 +        public boolean exists() {
 +            return isExistingDirectory(common)
 +                && isExistingDirectory(hdfs)
 +                && isExistingDirectory(mapred);
 +        }
 +
 +        /**
 +         * Checks if all the base directories exist.
 +         *
 +         * @return this reference.
 +         * @throws IOException if any of the base directories does not exist.
 +         */
 +        public HadoopLocations existsOrException() throws IOException {
 +            if (!isExistingDirectory(common))
 +                throw new IOException("Failed to resolve Hadoop installation location. HADOOP_COMMON_HOME " +
 +                    "or HADOOP_HOME environment variable should be set.");
 +
 +            if (!isExistingDirectory(hdfs))
 +                throw new IOException("Failed to resolve Hadoop installation location. HADOOP_HDFS_HOME " +
 +                    "or HADOOP_HOME environment variable should be set.");
 +
 +            if (!isExistingDirectory(mapred))
 +                throw new IOException("Failed to resolve Hadoop installation location. HADOOP_MAPRED_HOME " +
 +                    "or HADOOP_HOME environment variable should be set.");
 +
 +            return this;
 +        }
 +    }
 +
 +    /**
 +     * Gets locations from the environment.
 +     *
 +     * @return The locations as determined from the environment.
 +     */
 +    private static HadoopLocations getEnvHadoopLocations() {
 +        return new HadoopLocations(
 +            hadoopHome(),
 +            getEnv("HADOOP_COMMON_HOME", null),
 +            getEnv("HADOOP_HDFS_HOME", null),
 +            getEnv("HADOOP_MAPRED_HOME", null)
 +        );
 +    }
 +
 +    /**
 +     * Gets locations assuming Apache Hadoop distribution layout.
 +     *
 +     * @return The locations as for Apache distribution.
 +     */
 +    private static HadoopLocations getApacheHadoopLocations(String hadoopHome) {
 +        return new HadoopLocations(hadoopHome,
 +            hadoopHome + "/share/hadoop/common",
 +            hadoopHome + "/share/hadoop/hdfs",
 +            hadoopHome + "/share/hadoop/mapreduce");
 +    }
 +
 +    /** HDP Hadoop locations. */
 +    private static final HadoopLocations HDP_HADOOP_LOCATIONS = new HadoopLocations(
 +        "/usr/hdp/current/hadoop-client",
 +        "/usr/hdp/current/hadoop-client",
 +        "/usr/hdp/current/hadoop-hdfs-client/",
 +        "/usr/hdp/current/hadoop-mapreduce-client/");
 +
 +    /**
 +     * HDP locations relative to an arbitrary Hadoop home.
 +     *
 +     * @param hadoopHome The hadoop home.
 +     * @return The locations.
 +     */
 +    private static HadoopLocations getHdpLocationsRelative(String hadoopHome) {
 +        return new HadoopLocations(hadoopHome, hadoopHome,
 +            hadoopHome + "/../hadoop-hdfs-client/",
 +            hadoopHome + "/../hadoop-mapreduce-client/");
 +    }
- 
++    
++    
      /**
-      * Gets the existing Hadoop locations, if any.
+      * Gets Hadoop locations.
       *
-      * @return Existing Hadoop locations.
-      * @throws IOException If no existing location found.
 -     * @return The Hadoop locations, never null.
++     * @return The locations as determined from the environment.
       */
 -    public static HadoopLocations hadoopLocations() throws IOException {
 -        final String hadoopHome = systemOrEnv("HADOOP_HOME", systemOrEnv("HADOOP_PREFIX", null));
 +    public static HadoopLocations getHadoopLocations() throws IOException {
-         // 1. Try locations defined in System properties or environment:
-         HadoopLocations loc = getEnvHadoopLocations();
- 
-         if (loc.isDefined())
-             return loc.existsOrException();
- 
 +        final String hadoopHome = hadoopHome();
 +
 +        if (hadoopHome != null) {
 +            // If home is defined, it must exist:
 +            if (!isExistingDirectory(hadoopHome))
 +                throw new IOException("HADOOP_HOME location is not an existing readable directory. [dir="
 +                    + hadoopHome + ']');
 +
 +            // 2. Try Apache Hadoop locations defined relative to HADOOP_HOME:
 +            loc = getApacheHadoopLocations(hadoopHome);
  
 -        String commonHome = resolveLocation("HADOOP_COMMON_HOME", hadoopHome, "/share/hadoop/common");
 -        String hdfsHome = resolveLocation("HADOOP_HDFS_HOME", hadoopHome, "/share/hadoop/hdfs");
 -        String mapredHome = resolveLocation("HADOOP_MAPRED_HOME", hadoopHome, "/share/hadoop/mapreduce");
 +            if (loc.exists())
 +                return loc;
  
 -        return new HadoopLocations(hadoopHome, commonHome, hdfsHome, mapredHome);
 +            // 3. Try HDP Hadoop locations defined relative to HADOOP_HOME:
 +            loc = getHdpLocationsRelative(hadoopHome);
 +
 +            return loc.existsOrException();
 +        }
 +
 +        // 4. Try absolute HDP (Hortonworks) location:
 +        return HDP_HADOOP_LOCATIONS.existsOrException();
      }
  
      /**
@@@ -295,46 -98,51 +268,73 @@@
       * @return Collection of directory and mask pairs.
       * @throws IOException if a mandatory classpath location is not found.
       */
-     private static Collection<DirAndMask> getClasspathBaseDirectories() throws IOException {
-         HadoopLocations loc = getHadoopLocations();
+     private static Collection<SearchDirectory> classpathDirectories() throws IOException {
+         HadoopLocations loc = hadoopLocations();
  
-         Collection<DirAndMask> c = new ArrayList<>();
+         Collection<SearchDirectory> res = new ArrayList<>();
  
-         c.add(new DirAndMask(new File(loc.common, "lib"), null));
-         c.add(new DirAndMask(new File(loc.hdfs, "lib"), null));
-         c.add(new DirAndMask(new File(loc.mapred, "lib"), null));
+         res.add(new SearchDirectory(new File(loc.commonHome(), "lib"), null));
+         res.add(new SearchDirectory(new File(loc.hdfsHome(), "lib"), null));
+         res.add(new SearchDirectory(new File(loc.mapredHome(), "lib"), null));
  
-         c.add(new DirAndMask(new File(loc.common), "hadoop-common-"));
-         c.add(new DirAndMask(new File(loc.common), "hadoop-auth-"));
+         res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-common-"));
+         res.add(new SearchDirectory(new File(loc.commonHome()), "hadoop-auth-"));
  
-         c.add(new DirAndMask(new File(loc.hdfs), "hadoop-hdfs-"));
+         res.add(new SearchDirectory(new File(loc.hdfsHome()), "hadoop-hdfs-"));
  
-         c.add(new DirAndMask(new File(loc.mapred), "hadoop-mapreduce-client-common"));
-         c.add(new DirAndMask(new File(loc.mapred), "hadoop-mapreduce-client-core"));
+         res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-common"));
+         res.add(new SearchDirectory(new File(loc.mapredHome()), "hadoop-mapreduce-client-core"));
  
-         return c;
+         return res;
      }
  
++//    /**
++//     * Simple pair-like structure to hold directory name and a mask assigned to it.
++//     */
++//    public static class DirAndMask {
++//        /**
++//         * Constructor.
++//         *
++//         * @param dir The directory.
++//         * @param mask The mask.
++//         */
++//        DirAndMask(File dir, String mask) {
++//            this.dir = dir;
++//            this.mask = mask;
++//        }
++//
++//        /** The path. */
++//        public final File dir;
++//
++//        /** The mask. */
++//        public final String mask;
++//    }
++
      /**
-      * Simple pair-like structure to hold directory name and a mask assigned to it.
+      * Resolves a Hadoop location directory.
+      *
+      * @param envVarName Environment variable name. The value denotes the location path.
+      * @param hadoopHome Hadoop home location, may be null.
+      * @param expHadoopHomeRelativePath The path relative to Hadoop home, expected to start with path separator.
+      * @throws IOException If the value cannot be resolved to an existing directory.
       */
-     public static class DirAndMask {
-         /**
-          * Constructor.
-          *
-          * @param dir The directory.
-          * @param mask The mask.
-          */
-         DirAndMask(File dir, String mask) {
-             this.dir = dir;
-             this.mask = mask;
+     private static String resolveLocation(String envVarName, String hadoopHome, String expHadoopHomeRelativePath)
+         throws IOException {
 -        String val = systemOrEnv(envVarName, null);
++        String val = getEnv(envVarName, null);
+ 
+         if (val == null) {
+             // The env. variable is not set. Try to resolve the location relative HADOOP_HOME:
 -            if (!directoryExists(hadoopHome))
++            if (!isExistingDirectory(hadoopHome))
+                 throw new IOException("Failed to resolve Hadoop installation location. " +
+                         envVarName + " or HADOOP_HOME environment variable should be set.");
+ 
+             val = hadoopHome + expHadoopHomeRelativePath;
          }
  
-         /** The path. */
-         public final File dir;
 -        if (!directoryExists(val))
++        if (!isExistingDirectory(val))
+             throw new IOException("Failed to resolve Hadoop location [path=" + val + ']');
  
-         /** The mask. */
-         public final String mask;
+         return val;
      }
  
      /**


Mime
View raw message