incubator-blur-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amccu...@apache.org
Subject git commit: Fixing broken hadoop2 build.
Date Sat, 18 Apr 2015 01:02:46 GMT
Repository: incubator-blur
Updated Branches:
  refs/heads/master 18c8db8f6 -> fd94b8506


Fixing broken hadoop2 build.


Project: http://git-wip-us.apache.org/repos/asf/incubator-blur/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-blur/commit/fd94b850
Tree: http://git-wip-us.apache.org/repos/asf/incubator-blur/tree/fd94b850
Diff: http://git-wip-us.apache.org/repos/asf/incubator-blur/diff/fd94b850

Branch: refs/heads/master
Commit: fd94b8506946f24e117066660406e439646896b8
Parents: 18c8db8
Author: Aaron McCurry <amccurry@gmail.com>
Authored: Fri Apr 17 21:02:45 2015 -0400
Committer: Aaron McCurry <amccurry@gmail.com>
Committed: Fri Apr 17 21:02:45 2015 -0400

----------------------------------------------------------------------
 .../lib/BlurOutputFormatMiniClusterTest.java    |  4 --
 .../apache/blur/store/hdfs/HdfsDirectory.java   | 16 +++---
 .../blur/store/hdfs_v2/HdfsKeyValueStore.java   | 30 ++---------
 .../apache/blur/store/hdfs_v2/HdfsUtils.java    | 52 ++++++++++++++++++++
 4 files changed, 63 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/fd94b850/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurOutputFormatMiniClusterTest.java
----------------------------------------------------------------------
diff --git a/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurOutputFormatMiniClusterTest.java b/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurOutputFormatMiniClusterTest.java
index f1820b1..d3bd4e7 100644
--- a/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurOutputFormatMiniClusterTest.java
+++ b/blur-mapred/src/test/java/org/apache/blur/mapreduce/lib/BlurOutputFormatMiniClusterTest.java
@@ -90,10 +90,6 @@ public class BlurOutputFormatMiniClusterTest {
     miniCluster = new MiniCluster();
    miniCluster.startBlurCluster(new File(testDirectory, "cluster").getAbsolutePath(), 2, 3, true, false);
 
-    // System.setProperty("test.build.data",
-    // "./target/BlurOutputFormatTest/data");
-    // TEST_ROOT_DIR = new Path(System.getProperty("test.build.data",
-    // "target/tmp/BlurOutputFormatTest_tmp"));
     TEST_ROOT_DIR = new Path(miniCluster.getFileSystemUri().toString() + "/blur_test");
     System.setProperty("hadoop.log.dir", "./target/BlurOutputFormatTest/hadoop_log");
     try {

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/fd94b850/blur-store/src/main/java/org/apache/blur/store/hdfs/HdfsDirectory.java
----------------------------------------------------------------------
diff --git a/blur-store/src/main/java/org/apache/blur/store/hdfs/HdfsDirectory.java b/blur-store/src/main/java/org/apache/blur/store/hdfs/HdfsDirectory.java
index 1e703c5..f048dc9 100644
--- a/blur-store/src/main/java/org/apache/blur/store/hdfs/HdfsDirectory.java
+++ b/blur-store/src/main/java/org/apache/blur/store/hdfs/HdfsDirectory.java
@@ -46,6 +46,7 @@ import org.apache.blur.BlurConfiguration;
 import org.apache.blur.log.Log;
 import org.apache.blur.log.LogFactory;
 import org.apache.blur.store.blockcache.LastModified;
+import org.apache.blur.store.hdfs_v2.HdfsUtils;
 import org.apache.blur.trace.Trace;
 import org.apache.blur.trace.Tracer;
 import org.apache.commons.io.IOUtils;
@@ -56,10 +57,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.hdfs.DFSClient;
-import org.apache.hadoop.hdfs.DFSClient.DFSInputStream;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.lucene.store.BufferedIndexOutput;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.IOContext;
@@ -528,12 +526,12 @@ public class HdfsDirectory extends Directory implements LastModified, HdfsSymlink
     Tracer trace = Trace.trace("filesystem - length", Trace.param("path", path));
     try {
       if (_fileSystem instanceof DistributedFileSystem) {
-        DistributedFileSystem distributedFileSystem = (DistributedFileSystem) _fileSystem;
-        DFSClient client = distributedFileSystem.getClient();
-        DFSInputStream inputStream = client.open(path.toUri().getPath());
-        long fileLength = inputStream.getFileLength();
-        inputStream.close();
-        return fileLength;
+        FSDataInputStream in = _fileSystem.open(path);
+        try {
+          return HdfsUtils.getFileLength(_fileSystem, path, in);
+        } finally {
+          in.close();
+        }
       } else {
         return _fileSystem.getFileStatus(path).getLen();
       }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/fd94b850/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStore.java
----------------------------------------------------------------------
diff --git a/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStore.java b/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStore.java
index 40600ac..c769997 100644
--- a/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStore.java
+++ b/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStore.java
@@ -18,11 +18,8 @@ package org.apache.blur.store.hdfs_v2;
 
 import java.io.DataInput;
 import java.io.DataOutput;
-import java.io.FilterInputStream;
 import java.io.IOException;
 import java.io.UnsupportedEncodingException;
-import java.lang.reflect.Field;
-import java.lang.reflect.Method;
 import java.util.Arrays;
 import java.util.Comparator;
 import java.util.HashSet;
@@ -61,11 +58,9 @@ public class HdfsKeyValueStore implements Store {
 
   public static final int DEFAULT_MAX_AMOUNT_ALLOWED_PER_FILE = 64 * 1024 * 1024;
   public static final long DEFAULT_MAX_OPEN_FOR_WRITING = TimeUnit.MINUTES.toMillis(1);
-  
+
   private static final String UTF_8 = "UTF-8";
   private static final String BLUR_KEY_VALUE = "blur_key_value";
-  private static final String IN = "in";
-  private static final String GET_FILE_LENGTH = "getFileLength";
   private static final Log LOG = LogFactory.getLog(HdfsKeyValueStore.class);
   private static final byte[] MAGIC;
   private static final int VERSION = 1;
@@ -205,7 +200,7 @@ public class HdfsKeyValueStore implements Store {
     for (FileStatus fileStatus : _fileStatus.get()) {
       Path path = fileStatus.getPath();
       FSDataInputStream inputStream = _fileSystem.open(path);
-      long len = getFileLength(path, inputStream);
+      long len = HdfsUtils.getFileLength(_fileSystem, path, inputStream);
       inputStream.close();
       if (len < MAGIC.length + VERSION_LENGTH) {
         // Remove invalid file
@@ -516,12 +511,6 @@ public class HdfsKeyValueStore implements Store {
     }
   }
 
-  private long getFileLength(Path path, FSDataInputStream inputStream) throws IOException {
-    FileStatus fileStatus = _fileSystem.getFileStatus(path);
-    long dfsLength = getDFSLength(inputStream);
-    return Math.max(dfsLength, fileStatus.getLen());
-  }
-
   private void syncInternal() throws IOException {
     _output.flush();
     _output.sync();
@@ -539,7 +528,7 @@ public class HdfsKeyValueStore implements Store {
     }
     int version = inputStream.readInt();
     if (version == 1) {
-      long fileLength = getFileLength(path, inputStream);
+      long fileLength = HdfsUtils.getFileLength(_fileSystem, path, inputStream);
       Operation operation = new Operation();
       try {
         while (inputStream.getPos() < fileLength) {
@@ -592,16 +581,5 @@ public class HdfsKeyValueStore implements Store {
     return new TreeSet<FileStatus>();
   }
 
-  private long getDFSLength(FSDataInputStream inputStream) throws IOException {
-    try {
-      Field field = FilterInputStream.class.getDeclaredField(IN);
-      field.setAccessible(true);
-      Object dfs = field.get(inputStream);
-      Method method = dfs.getClass().getMethod(GET_FILE_LENGTH, new Class[] {});
-      Object length = method.invoke(dfs, new Object[] {});
-      return (Long) length;
-    } catch (Exception e) {
-      throw new IOException(e);
-    }
-  }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/fd94b850/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsUtils.java
----------------------------------------------------------------------
diff --git a/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsUtils.java b/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsUtils.java
new file mode 100644
index 0000000..2973679
--- /dev/null
+++ b/blur-store/src/main/java/org/apache/blur/store/hdfs_v2/HdfsUtils.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.blur.store.hdfs_v2;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+
+public class HdfsUtils {
+
+  private static final String IN = "in";
+  private static final String GET_FILE_LENGTH = "getFileLength";
+
+  public static long getFileLength(FileSystem fileSystem, Path path, FSDataInputStream inputStream) throws IOException {
+    FileStatus fileStatus = fileSystem.getFileStatus(path);
+    long dfsLength = getDFSLength(inputStream);
+    return Math.max(dfsLength, fileStatus.getLen());
+  }
+
+  public static long getDFSLength(FSDataInputStream inputStream) throws IOException {
+    try {
+      Field field = FilterInputStream.class.getDeclaredField(IN);
+      field.setAccessible(true);
+      Object dfs = field.get(inputStream);
+      Method method = dfs.getClass().getMethod(GET_FILE_LENGTH, new Class[] {});
+      Object length = method.invoke(dfs, new Object[] {});
+      return (Long) length;
+    } catch (Exception e) {
+      throw new IOException(e);
+    }
+  }
+}


Mime
View raw message