incubator-blur-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amccu...@apache.org
Subject git commit: Adding more blur hive tests and this fixes the tests when hadoop2 is the profile.
Date Mon, 26 Jan 2015 01:26:19 GMT
Repository: incubator-blur
Updated Branches:
  refs/heads/master fad57102e -> 0e61cd404


Adding more blur hive tests and this fixes the tests when hadoop2 is the profile.


Project: http://git-wip-us.apache.org/repos/asf/incubator-blur/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-blur/commit/0e61cd40
Tree: http://git-wip-us.apache.org/repos/asf/incubator-blur/tree/0e61cd40
Diff: http://git-wip-us.apache.org/repos/asf/incubator-blur/diff/0e61cd40

Branch: refs/heads/master
Commit: 0e61cd4048b20e902318e2f4e4caa4cee0621da5
Parents: fad5710
Author: Aaron McCurry <amccurry@gmail.com>
Authored: Sun Jan 25 20:26:12 2015 -0500
Committer: Aaron McCurry <amccurry@gmail.com>
Committed: Sun Jan 25 20:26:12 2015 -0500

----------------------------------------------------------------------
 .../test/java/org/apache/blur/MiniCluster.java  | 122 +++++++++++++++++++
 blur-hive/pom.xml                               | 109 +++++++++++++----
 .../apache/blur/hive/NullHiveInputFormat.java   |   3 +-
 .../org/apache/blur/hive/BlurSerDeTest.java     | 113 +++++++++++------
 pom.xml                                         |   7 +-
 5 files changed, 289 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0e61cd40/blur-core/src/test/java/org/apache/blur/MiniCluster.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/MiniCluster.java b/blur-core/src/test/java/org/apache/blur/MiniCluster.java
index 12688a3..542cc5f 100644
--- a/blur-core/src/test/java/org/apache/blur/MiniCluster.java
+++ b/blur-core/src/test/java/org/apache/blur/MiniCluster.java
@@ -37,7 +37,9 @@ import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.lang.reflect.Field;
 import java.net.URI;
+import java.net.URL;
 import java.util.ArrayList;
+import java.util.Enumeration;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
@@ -50,6 +52,12 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import javax.script.Bindings;
+import javax.script.ScriptContext;
+import javax.script.ScriptEngine;
+import javax.script.ScriptEngineManager;
+import javax.script.ScriptException;
+
 import org.apache.blur.log.Log;
 import org.apache.blur.log.LogFactory;
 import org.apache.blur.store.buffer.BufferStore;
@@ -81,6 +89,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.VersionInfo;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.WatchedEvent;
 import org.apache.zookeeper.Watcher;
@@ -95,6 +104,9 @@ public class MiniCluster {
   private List<MiniClusterServer> controllers = new ArrayList<MiniClusterServer>();
   private List<MiniClusterServer> shards = new ArrayList<MiniClusterServer>();
   private ThreadGroup group = new ThreadGroup(id);
+  private Configuration _conf;
+  private Object mrMiniCluster;
+  private Configuration _mrConf;
 
   public static void main(String[] args) throws IOException, InterruptedException, KeeperException, BlurException,
       TException {
@@ -170,6 +182,111 @@ public class MiniCluster {
     }
   }
 
+  public void stopMrMiniCluster() throws IOException {
+    ScriptEngineManager manager = new ScriptEngineManager();
+    ScriptEngine engine = manager.getEngineByName("js");
+    if (useYarn()) {
+      engine.put("mrMiniCluster", mrMiniCluster);
+      try {
+        engine.eval("mrMiniCluster.stop();");
+      } catch (ScriptException e) {
+        throw new IOException(e);
+      }
+    } else {
+      engine.put("mrMiniCluster", mrMiniCluster);
+      try {
+        engine.eval("mrMiniCluster.shutdown();");
+      } catch (ScriptException e) {
+        throw new IOException(e);
+      }
+    }
+  }
+
+  public void startMrMiniCluster() throws IOException {
+    _mrConf = startMrMiniClusterInternal();
+  }
+
+  public Configuration getMRConfiguration() {
+    return _mrConf;
+  }
+
+  private Configuration startMrMiniClusterInternal() throws IOException {
+    String fileSystemUri = getFileSystemUri().toString();
+    ScriptEngineManager manager = new ScriptEngineManager();
+    ScriptEngine engine = manager.getEngineByName("js");
+
+    if (useYarn()) {
+      int nodeManagers = 1;
+      Class<?> c = getClass();
+      engine.put("c", c);
+      engine.put("nodeManagers", nodeManagers);
+      engine.put("fileSystemUri", fileSystemUri);
+      try {
+        engine.eval("conf = new org.apache.hadoop.yarn.conf.YarnConfiguration()");
+        engine.eval("org.apache.hadoop.fs.FileSystem.setDefaultUri(conf, fileSystemUri);");
+        engine
+            .eval("mrMiniCluster = org.apache.hadoop.mapred.MiniMRClientClusterFactory.create(c, nodeManagers, conf);");
+        engine.eval("mrMiniCluster.start();");
+        engine.eval("configuration = mrMiniCluster.getConfig();");
+      } catch (ScriptException e) {
+        throw new IOException(e);
+      }
+
+      Bindings bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
+      mrMiniCluster = bindings.get("mrMiniCluster");
+      return (Configuration) bindings.get("configuration");
+    } else {
+      int numTaskTrackers = 1;
+      int numDir = 1;
+      engine.put("fileSystemUri", fileSystemUri);
+      engine.put("numTaskTrackers", numTaskTrackers);
+      engine.put("numDir", numDir);
+
+      try {
+        engine
+            .eval("mrMiniCluster = new org.apache.hadoop.mapred.MiniMRCluster(numTaskTrackers, fileSystemUri, numDir);");
+        engine.eval("configuration = mrMiniCluster.createJobConf();");
+      } catch (ScriptException e) {
+        throw new IOException(e);
+      }
+      Bindings bindings = engine.getBindings(ScriptContext.ENGINE_SCOPE);
+      mrMiniCluster = bindings.get("mrMiniCluster");
+      return (Configuration) bindings.get("configuration");
+    }
+  }
+
+  private boolean useYarn() {
+    String version = VersionInfo.getVersion();
+    if (version.startsWith("0.20.") || version.startsWith("1.")) {
+      return false;
+    }
+    // Check for mr1 hadoop2
+    if (isMr1Hadoop2()) {
+      return false;
+    }
+    return true;
+  }
+
+  private boolean isMr1Hadoop2() {
+    try {
+      Enumeration<URL> e = ClassLoader.getSystemClassLoader().getResources(
+          "META-INF/maven/org.apache.hadoop/hadoop-client/pom.properties");
+      while (e.hasMoreElements()) {
+        URL url = e.nextElement();
+        InputStream stream = url.openStream();
+        Properties properties = new Properties();
+        properties.load(stream);
+        Object object = properties.get("version");
+        if (object.toString().contains("mr1")) {
+          return true;
+        }
+      }
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+    return false;
+  }
+
   private void waitForSafeModeToExit() throws BlurException, TException, IOException {
     String controllerConnectionStr = getControllerConnectionStr();
     Iface client = BlurClient.getClient(controllerConnectionStr);
@@ -597,6 +714,10 @@ public class MiniCluster {
     server.waitUntilOnline();
   }
 
+  public Configuration getConfiguration() {
+    return _conf;
+  }
+
   public String getZkConnectionString() {
     return zkMiniCluster.getZkConnectionString();
   }
@@ -646,6 +767,7 @@ public class MiniCluster {
   }
 
   public void startDfs(Configuration conf, boolean format, String path) {
+    _conf = conf;
     String perm;
     Path p = new Path(new File(path).getAbsolutePath());
     try {

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0e61cd40/blur-hive/pom.xml
----------------------------------------------------------------------
diff --git a/blur-hive/pom.xml b/blur-hive/pom.xml
index b0c12e2..4074f9c 100644
--- a/blur-hive/pom.xml
+++ b/blur-hive/pom.xml
@@ -38,30 +38,7 @@
 			<version>${slf4j.version}</version>
 			<scope>provided</scope>
 		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-serde</artifactId>
-			<version>${hive.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-exec</artifactId>
-			<version>${hive.version}</version>
-			<scope>provided</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-cli</artifactId>
-			<version>${hive.version}</version>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.hive</groupId>
-			<artifactId>hive-jdbc</artifactId>
-			<version>${hive.version}</version>
-			<scope>test</scope>
-		</dependency>
+
 		<dependency>
 			<groupId>org.apache.blur</groupId>
 			<artifactId>blur-core</artifactId>
@@ -196,6 +173,30 @@
 					<version>${hadoop.version}</version>
 					<scope>test</scope>
 				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-exec</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-serde</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-cli</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-jdbc</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
 			</dependencies>
 		</profile>
 		<profile>
@@ -241,6 +242,30 @@
 						</exclusion>
 					</exclusions>
 				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-exec</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-serde</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-cli</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-jdbc</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
 			</dependencies>
 		</profile>
 		<profile>
@@ -253,9 +278,13 @@
 			<dependencies>
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-common</artifactId>
+					<version>${hadoop.version}</version>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
 					<artifactId>hadoop-client</artifactId>
 					<version>${hadoop.version}</version>
-					<scope>provided</scope>
 				</dependency>
 				<dependency>
 					<groupId>org.apache.hadoop</groupId>
@@ -264,6 +293,12 @@
 					<scope>test</scope>
 				</dependency>
 				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-hdfs</artifactId>
+					<version>${hadoop.version}</version>
+					<scope>test</scope>
+				</dependency>
+				<dependency>
 					<groupId>org.apache.blur</groupId>
 					<artifactId>blur-mapred-hadoop2</artifactId>
 					<version>${project.version}</version>
@@ -286,6 +321,30 @@
 						</exclusion>
 					</exclusions>
 				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-exec</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-serde</artifactId>
+					<version>${hive.version}</version>
+					<scope>provided</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-cli</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hive</groupId>
+					<artifactId>hive-jdbc</artifactId>
+					<version>${hive.version}</version>
+					<scope>test</scope>
+				</dependency>
 			</dependencies>
 		</profile>
 	</profiles>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0e61cd40/blur-hive/src/main/java/org/apache/blur/hive/NullHiveInputFormat.java
----------------------------------------------------------------------
diff --git a/blur-hive/src/main/java/org/apache/blur/hive/NullHiveInputFormat.java b/blur-hive/src/main/java/org/apache/blur/hive/NullHiveInputFormat.java
index 62eb656..3a0b054 100644
--- a/blur-hive/src/main/java/org/apache/blur/hive/NullHiveInputFormat.java
+++ b/blur-hive/src/main/java/org/apache/blur/hive/NullHiveInputFormat.java
@@ -18,8 +18,7 @@ package org.apache.blur.hive;
 
 import java.io.IOException;
 
-import groovy.lang.Writable;
-
+import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0e61cd40/blur-hive/src/test/java/org/apache/blur/hive/BlurSerDeTest.java
----------------------------------------------------------------------
diff --git a/blur-hive/src/test/java/org/apache/blur/hive/BlurSerDeTest.java b/blur-hive/src/test/java/org/apache/blur/hive/BlurSerDeTest.java
index a6f87b0..b504ffe 100644
--- a/blur-hive/src/test/java/org/apache/blur/hive/BlurSerDeTest.java
+++ b/blur-hive/src/test/java/org/apache/blur/hive/BlurSerDeTest.java
@@ -21,7 +21,8 @@ import static org.junit.Assert.assertEquals;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintWriter;
-import java.lang.reflect.Method;
+import java.lang.reflect.Field;
+import java.net.ServerSocket;
 import java.sql.Connection;
 import java.sql.Date;
 import java.sql.DriverManager;
@@ -55,10 +56,12 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hive.jdbc.HiveDriver;
+import org.apache.hive.service.server.HiveServer2;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -67,8 +70,12 @@ import org.junit.Test;
 
 public class BlurSerDeTest {
 
-  private static final File DERBY_FILE = new File("derby.log");
-  private static final File METASTORE_DB_FILE = new File("metastore_db");
+  public static final File WAREHOUSE = new File("./target/tmp/warehouse");
+  public static final String COLUMN_SEP = new String(new char[] { 1 });
+  public static final String ITEM_SEP = new String(new char[] { 2 });
+  public static final File DERBY_FILE = new File("derby.log");
+  public static final File METASTORE_DB_FILE = new File("metastore_db");
+
   private static final String FAM = "fam0";
   private static final String YYYYMMDD = "yyyyMMdd";
   private static final String YYYY_MM_DD = "yyyy-MM-dd";
@@ -76,9 +83,6 @@ public class BlurSerDeTest {
   private static final File TMPDIR = new File(System.getProperty("blur.tmp.dir", "./target/tmp_BlurSerDeTest"));
   private static MiniCluster miniCluster;
   private static boolean externalProcesses = true;
-  private static final File WAREHOUSE = new File("./target/tmp/warehouse");
-  private static final String COLUMN_SEP = new String(new char[] { 1 });
-  private static final String ITEM_SEP = new String(new char[] { 2 });
 
   @BeforeClass
   public static void startCluster() throws IOException {
@@ -110,8 +114,6 @@ public class BlurSerDeTest {
     miniCluster.shutdownBlurCluster();
   }
 
-  private Object mrMiniCluster;
-
   @Before
   public void setup() throws BlurException, TException, IOException {
     String controllerConnectionStr = miniCluster.getControllerConnectionStr();
@@ -159,7 +161,7 @@ public class BlurSerDeTest {
     rmr(DERBY_FILE);
   }
 
-  private void rmr(File file) {
+  public static void rmr(File file) {
     if (!file.exists()) {
       return;
     }
@@ -254,9 +256,20 @@ public class BlurSerDeTest {
   }
 
   @Test
-  public void test2() throws SQLException, ClassNotFoundException, IOException, BlurException, TException {
+  public void test2() throws SQLException, ClassNotFoundException, IOException, BlurException, TException, InterruptedException {
+    miniCluster.startMrMiniCluster();
+    Configuration configuration = miniCluster.getMRConfiguration();
+    HiveConf hiveConf = new HiveConf(configuration, getClass());
+    hiveConf.set("hive.server2.thrift.port", "0");
+    HiveServer2 hiveServer2 = new HiveServer2();
+    hiveServer2.init(hiveConf);
+    hiveServer2.start();
+
+    int port = waitForStartupAndGetPort(hiveServer2);
+
     Class.forName(HiveDriver.class.getName());
-    Connection connection = DriverManager.getConnection("jdbc:hive2://");
+    String userName = UserGroupInformation.getCurrentUser().getShortUserName();
+    Connection connection = DriverManager.getConnection("jdbc:hive2://localhost:" + port, userName, "");
 
     run(connection, "set hive.metastore.warehouse.dir=" + WAREHOUSE.toURI().toString());
     run(connection, "create database if not exists testdb");
@@ -277,11 +290,8 @@ public class BlurSerDeTest {
     generateData(tableDir, totalRecords);
 
     run(connection, "select * from loadtable");
-
-    Configuration configuration = startMrMiniCluster();
-    run(connection, "set mapred.job.tracker=" + configuration.get("mapred.job.tracker"));
     run(connection, "insert into table testtable select * from loadtable");
-    stopMrMiniCluster();
+    miniCluster.stopMrMiniCluster();
     connection.close();
 
     Iface client = BlurClient.getClientFromZooKeeperConnectionStr(miniCluster.getZkConnectionString());
@@ -293,25 +303,6 @@ public class BlurSerDeTest {
     assertEquals(totalRecords, results.getTotalResults());
   }
 
-  private void stopMrMiniCluster() {
-    callMethod(mrMiniCluster, "shutdown");
-  }
-
-  private Object callMethod(Object o, String methodName, Class<?>... classes) {
-    Class<? extends Object> clazz = o.getClass();
-    try {
-      Method method = clazz.getDeclaredMethod(methodName, classes);
-      return method.invoke(o, new Object[] {});
-    } catch (Exception e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  private Configuration startMrMiniCluster() throws IOException {
-    mrMiniCluster = new MiniMRCluster(1, miniCluster.getFileSystemUri().toString(), 1);
-    return (Configuration) callMethod(mrMiniCluster, "createJobConf");
-  }
-
   private void generateData(File file, int totalRecords) throws IOException {
     SimpleDateFormat simpleDateFormat = new SimpleDateFormat(YYYY_MM_DD);
     PrintWriter print = new PrintWriter(new File(file, "data"));
@@ -443,7 +434,7 @@ public class BlurSerDeTest {
     throw new RuntimeException("Can't build create table script.");
   }
 
-  private void run(Connection connection, String sql) throws SQLException {
+  public static void run(Connection connection, String sql) throws SQLException {
     System.out.println("Running:" + sql);
     Statement statement = connection.createStatement();
     if (statement.execute(sql)) {
@@ -480,4 +471,54 @@ public class BlurSerDeTest {
     }
     return map;
   }
+
+  @SuppressWarnings("resource")
+  private int waitForStartupAndGetPort(HiveServer2 hiveServer2) throws InterruptedException {
+    while (true) {
+      // thriftCLIService->server->serverTransport_->serverSocket_
+      Thread.sleep(100);
+      Object o1 = getObject(hiveServer2, "thriftCLIService");
+      if (o1 == null) {
+        continue;
+      }
+      Object o2 = getObject(o1, "server");
+      if (o2 == null) {
+        continue;
+      }
+      Object o3 = getObject(o2, "serverTransport_");
+      if (o3 == null) {
+        continue;
+      }
+      Object o4 = getObject(o3, "serverSocket_");
+      if (o4 == null) {
+        continue;
+      }
+      ServerSocket socket = (ServerSocket) o4;
+      return socket.getLocalPort();
+    }
+  }
+
+  private Object getObject(Object o, String field) {
+    return getObject(o, field, o.getClass());
+  }
+
+  private Object getObject(Object o, String field, Class<? extends Object> clazz) {
+    try {
+      Field declaredField = clazz.getDeclaredField(field);
+      return getObject(o, declaredField);
+    } catch (NoSuchFieldException e) {
+      return getObject(o, field, clazz.getSuperclass());
+    } catch (SecurityException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private Object getObject(Object o, Field field) {
+    field.setAccessible(true);
+    try {
+      return field.get(o);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
 }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0e61cd40/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 1983960..491b6aa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -397,13 +397,16 @@ under the License.
 					<groupId>org.apache.maven.plugins</groupId>
 					<artifactId>maven-surefire-plugin</artifactId>
 					<configuration>
-						<argLine>-XX:+UseConcMarkSweepGC -Xmx1g -Xms1g</argLine>
+						<argLine>-XX:+UseConcMarkSweepGC -Xmx1g -Xms1g -XX:MaxPermSize=256m</argLine>
 						<forkCount>2</forkCount>
 						<forkMode>always</forkMode>
 						<reuseForks>false</reuseForks>
 						<systemPropertyVariables>
 							<blur.tmp.dir>${project.build.directory}/target/tmp</blur.tmp.dir>
 						</systemPropertyVariables>
+						<environmentVariables>
+							<JAVA_HOME>${java.home}</JAVA_HOME>
+						</environmentVariables>
 					</configuration>
 				</plugin>
 				<!-- Ignore/Execute plugin execution -->
@@ -528,7 +531,7 @@ under the License.
 				</property>
 			</activation>
 			<properties>
-				<hadoop.version>2.2.0</hadoop.version>
+				<hadoop.version>2.6.0</hadoop.version>
                 <projectVersion>hadoop2-${hadoop.version}-${project.parent.version}</projectVersion>
 			</properties>
 			<modules>


Mime
View raw message