incubator-blur-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amccu...@apache.org
Subject [3/3] git commit: Fixing various bugs in getting the unit tests to run successfully.
Date Sat, 08 Feb 2014 22:55:48 GMT
Fixing various bugs in getting the unit tests to run successfully.


Project: http://git-wip-us.apache.org/repos/asf/incubator-blur/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-blur/commit/0756d357
Tree: http://git-wip-us.apache.org/repos/asf/incubator-blur/tree/0756d357
Diff: http://git-wip-us.apache.org/repos/asf/incubator-blur/diff/0756d357

Branch: refs/heads/apache-blur-0.2
Commit: 0756d357d21da8826cca52db71abe498a69feb72
Parents: c8f1de1
Author: Aaron McCurry <amccurry@gmail.com>
Authored: Sat Feb 8 17:55:38 2014 -0500
Committer: Aaron McCurry <amccurry@gmail.com>
Committed: Sat Feb 8 17:55:38 2014 -0500

----------------------------------------------------------------------
 .../indexserver/DistributedIndexServer.java     |  11 +-
 .../org/apache/blur/utils/BlurUtilsTest.java    |  22 ++--
 blur-store/pom.xml                              |  44 +++++++
 .../blur/store/blockcache_v2/BaseCache.java     |  17 +--
 .../store/hdfs_v2/HdfsKeyValueStoreTest.java    | 124 ++++++++++++++++++-
 5 files changed, 191 insertions(+), 27 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0756d357/blur-core/src/main/java/org/apache/blur/manager/indexserver/DistributedIndexServer.java
----------------------------------------------------------------------
diff --git a/blur-core/src/main/java/org/apache/blur/manager/indexserver/DistributedIndexServer.java b/blur-core/src/main/java/org/apache/blur/manager/indexserver/DistributedIndexServer.java
index 2e04109..53528bc 100644
--- a/blur-core/src/main/java/org/apache/blur/manager/indexserver/DistributedIndexServer.java
+++ b/blur-core/src/main/java/org/apache/blur/manager/indexserver/DistributedIndexServer.java
@@ -259,9 +259,9 @@ public class DistributedIndexServer extends AbstractDistributedIndexServer {
   }
 
   private WatchChildren watchForShardServerChanges() {
-
     WatchChildren watchOnlineShards = new WatchChildren(_zookeeper,
-        ZookeeperPathConstants.getOnlineShardsPath(_cluster)).watch(new OnChange() {
+        ZookeeperPathConstants.getOnlineShardsPath(_cluster));
+    watchOnlineShards.watch(new OnChange() {
       private List<String> _prevOnlineShards = new ArrayList<String>();
 
       @Override
@@ -482,18 +482,15 @@ public class DistributedIndexServer extends AbstractDistributedIndexServer {
 
     ShardContext shardContext = ShardContext.create(tableContext, shard);
 
-    Directory dir;
 
     TableDescriptor descriptor = tableContext.getDescriptor();
     boolean blockCacheEnabled = descriptor.isBlockCaching();
     if (blockCacheEnabled) {
       Set<String> blockCacheFileTypes = descriptor.getBlockCachingFileTypes();
-      dir = _blockCacheDirectoryFactory.newDirectory(table, shard, directory, blockCacheFileTypes);
-    } else {
-      dir = directory;
+      directory = _blockCacheDirectoryFactory.newDirectory(table, shard, directory, blockCacheFileTypes);
     }
 
-    BlurIndex index = tableContext.newInstanceBlurIndex(shardContext, dir, _mergeScheduler, _gc, _searchExecutor,
+    BlurIndex index = tableContext.newInstanceBlurIndex(shardContext, directory, _mergeScheduler, _gc, _searchExecutor,
         _indexCloser, _refresher, _warmup);
 
     if (_clusterStatus.isReadOnly(true, _cluster, table)) {

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0756d357/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
----------------------------------------------------------------------
diff --git a/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java b/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
index 6e6d505..f91633d 100644
--- a/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
+++ b/blur-core/src/test/java/org/apache/blur/utils/BlurUtilsTest.java
@@ -37,10 +37,10 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.lucene.analysis.core.KeywordAnalyzer;
 import org.apache.lucene.document.Document;
-import org.apache.lucene.document.Field;
-import org.apache.lucene.document.Field.Index;
 import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
@@ -250,15 +250,15 @@ public class BlurUtilsTest {
     IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
     IndexWriter writer = new IndexWriter(directory, conf);
     Document doc = new Document();
-    doc.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
-    doc.add(new Field("family", "f1", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    doc.add(new StringField("a", "b", Store.YES));
+    doc.add(new StringField("family", "f1", Store.YES));
 
     Document doc1 = new Document();
-    doc1.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    doc1.add(new StringField("a", "b", Store.YES));
     writer.addDocument(doc);
     writer.addDocument(doc1);
     writer.close();
-    return IndexReader.open(directory);
+    return DirectoryReader.open(directory);
   }
 
   private IndexReader getReaderWithDocsHavingFamily() throws CorruptIndexException, LockObtainFailedException,
@@ -267,16 +267,16 @@ public class BlurUtilsTest {
     IndexWriterConfig conf = new IndexWriterConfig(LUCENE_VERSION, new KeywordAnalyzer());
     IndexWriter writer = new IndexWriter(directory, conf);
     Document doc = new Document();
-    doc.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
-    doc.add(new Field("family", "f2", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    doc.add(new StringField("a", "b", Store.YES));
+    doc.add(new StringField("family", "f2", Store.YES));
 
     Document doc1 = new Document();
-    doc1.add(new Field("a", "b", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
-    doc1.add(new Field("family", "f1", Store.YES, Index.NOT_ANALYZED_NO_NORMS));
+    doc1.add(new StringField("a", "b", Store.YES));
+    doc1.add(new StringField("family", "f1", Store.YES));
     writer.addDocument(doc);
     writer.addDocument(doc1);
     writer.close();
-    return IndexReader.open(directory);
+    return DirectoryReader.open(directory);
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0756d357/blur-store/pom.xml
----------------------------------------------------------------------
diff --git a/blur-store/pom.xml b/blur-store/pom.xml
index 687d33b..35a9dea 100644
--- a/blur-store/pom.xml
+++ b/blur-store/pom.xml
@@ -135,4 +135,48 @@ under the License.
 			</plugin>
 		</plugins>
 	</build>
+	
+	<profiles>
+		<profile>
+			<id>hadoop-1x</id>
+			<activation>
+				<activeByDefault>true</activeByDefault>
+			</activation>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>${hadoop.version}</version>
+					<scope>test</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+		<profile>
+			<id>cdh4-mr1</id>
+			<dependencies>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-client</artifactId>
+					<version>${hadoop.version}</version>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-core</artifactId>
+					<version>${hadoop.version}</version>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-test</artifactId>
+					<version>${hadoop.version}</version>
+					<scope>test</scope>
+				</dependency>
+				<dependency>
+					<groupId>org.apache.hadoop</groupId>
+					<artifactId>hadoop-minicluster</artifactId>
+					<version>${hadoop.version}</version>
+					<scope>test</scope>
+				</dependency>
+			</dependencies>
+		</profile>
+	</profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0756d357/blur-store/src/main/java/org/apache/blur/store/blockcache_v2/BaseCache.java
----------------------------------------------------------------------
diff --git a/blur-store/src/main/java/org/apache/blur/store/blockcache_v2/BaseCache.java b/blur-store/src/main/java/org/apache/blur/store/blockcache_v2/BaseCache.java
index 796f59d..4805d40 100644
--- a/blur-store/src/main/java/org/apache/blur/store/blockcache_v2/BaseCache.java
+++ b/blur-store/src/main/java/org/apache/blur/store/blockcache_v2/BaseCache.java
@@ -200,14 +200,17 @@ public class BaseCache extends Cache implements Closeable {
       long fileModified = directory.getFileModified(fileName);
       FileIdKey oldKey = new FileIdKey(directory.getDirectoryName(), fileName, -1L);
       FileIdKey newKey = new FileIdKey(directory.getDirectoryName(), fileName, fileModified);
-      long currentFileId = _fileNameToId.get(oldKey);
-      if (fileId != currentFileId) {
-        throw new IOException("Something has gone very wrong file ids do not match [" + fileId + "] [" + currentFileId
-            + "] for key [" + oldKey + "]");
+      Long currentFileIdObject = _fileNameToId.get(oldKey);
+      if (currentFileIdObject != null) {
+        long currentFileId = currentFileIdObject;
+        if (fileId != currentFileId) {
+          throw new IOException("Something has gone very wrong file ids do not match [" + fileId + "] ["
+              + currentFileId + "] for key [" + oldKey + "]");
+        }
+        _fileNameToId.put(newKey, fileId);
+        _oldFileNameIdMap.put(fileId, newKey);
+        _fileNameToId.remove(oldKey);
       }
-      _fileNameToId.put(newKey, fileId);
-      _oldFileNameIdMap.put(fileId, newKey);
-      _fileNameToId.remove(oldKey);
     } else {
      throw new FileNotFoundException("File [" + fileName + "] not found in directory [" + directory + "]");
     }

http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/0756d357/blur-store/src/test/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStoreTest.java
----------------------------------------------------------------------
diff --git a/blur-store/src/test/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStoreTest.java b/blur-store/src/test/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStoreTest.java
index b8668ae..4c098f4 100644
--- a/blur-store/src/test/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStoreTest.java
+++ b/blur-store/src/test/java/org/apache/blur/store/hdfs_v2/HdfsKeyValueStoreTest.java
@@ -19,23 +19,49 @@ package org.apache.blur.store.hdfs_v2;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 
+import java.io.File;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.concurrent.ThreadPoolExecutor;
 
+import org.apache.blur.log.Log;
+import org.apache.blur.log.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.BytesRef;
+import org.junit.AfterClass;
 import org.junit.Before;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class HdfsKeyValueStoreTest {
 
-  private Path _path = new Path("hdfs://127.0.0.1:9000/test");
+  private static final Log LOG = LogFactory.getLog(HdfsKeyValueStoreTest.class);
+  private static final File TMPDIR = new File(System.getProperty("blur.tmp.dir", "./target/tmp_HdfsKeyValueStoreTest"));
+
   private Configuration _configuration = new Configuration();
+  private static MiniDFSCluster _cluster;
+  private Path _path;
+
+  @BeforeClass
+  public static void startCluster() {
+    Configuration conf = new Configuration();
+    startDfs(conf, true, TMPDIR.getAbsolutePath());
+  }
+
+  @AfterClass
+  public static void stopCluster() {
+    shutdownDfs();
+  }
 
   @Before
   public void setup() throws IOException {
-    FileSystem fileSystem = _path.getFileSystem(_configuration);
+    FileSystem fileSystem = _cluster.getFileSystem();
+    _path = new Path("/test").makeQualified(fileSystem);
     fileSystem.delete(_path, true);
   }
 
@@ -119,4 +145,98 @@ public class HdfsKeyValueStoreTest {
     return new BytesRef(s);
   }
 
+  public static void startDfs(Configuration conf, boolean format, String path) {
+    String perm;
+    Path p = new Path(new File("./target").getAbsolutePath());
+    try {
+      FileSystem fileSystem = p.getFileSystem(conf);
+      FileStatus fileStatus = fileSystem.getFileStatus(p);
+      FsPermission permission = fileStatus.getPermission();
+      perm = permission.getUserAction().ordinal() + "" + permission.getGroupAction().ordinal() + ""
+          + permission.getOtherAction().ordinal();
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+    LOG.info("dfs.datanode.data.dir.perm=" + perm);
+    conf.set("dfs.datanode.data.dir.perm", perm);
+    System.setProperty("test.build.data", path);
+    try {
+      _cluster = new MiniDFSCluster(conf, 1, true, (String[]) null);
+      _cluster.waitActive();
+    } catch (Exception e) {
+      LOG.error("error opening file system", e);
+      throw new RuntimeException(e);
+    }
+  }
+
+  public static void shutdownDfs() {
+    if (_cluster != null) {
+      LOG.info("Shutting down Mini DFS ");
+      try {
+        _cluster.shutdown();
+      } catch (Exception e) {
+        // / Can get a java.lang.reflect.UndeclaredThrowableException thrown
+        // here because of an InterruptedException. Don't let exceptions in
+        // here be cause of test failure.
+      }
+      try {
+        FileSystem fs = _cluster.getFileSystem();
+        if (fs != null) {
+          LOG.info("Shutting down FileSystem");
+          fs.close();
+        }
+        FileSystem.closeAll();
+      } catch (IOException e) {
+        LOG.error("error closing file system", e);
+      }
+
+      // This has got to be one of the worst hacks I have ever had to do.
+      // This is needed to shutdown 2 thread pools that are not shutdown by
+      // themselves.
+      ThreadGroup threadGroup = Thread.currentThread().getThreadGroup();
+      Thread[] threads = new Thread[100];
+      int enumerate = threadGroup.enumerate(threads);
+      for (int i = 0; i < enumerate; i++) {
+        Thread thread = threads[i];
+        if (thread.getName().startsWith("pool")) {
+          if (thread.isAlive()) {
+            thread.interrupt();
+            LOG.info("Stopping ThreadPoolExecutor [" + thread.getName() + "]");
+            Object target = getField(Thread.class, thread, "target");
+            if (target != null) {
+              ThreadPoolExecutor e = (ThreadPoolExecutor) getField(ThreadPoolExecutor.class, target, "this$0");
+              if (e != null) {
+                e.shutdownNow();
+              }
+            }
+            try {
+              LOG.info("Waiting for thread pool to exit [" + thread.getName() + "]");
+              thread.join();
+            } catch (InterruptedException e) {
+              throw new RuntimeException(e);
+            }
+          }
+        }
+      }
+    }
+  }
+
+  private static Object getField(Class<?> c, Object o, String fieldName) {
+    try {
+      Field field = c.getDeclaredField(fieldName);
+      field.setAccessible(true);
+      return field.get(o);
+    } catch (NoSuchFieldException e) {
+      try {
+        Field field = o.getClass().getDeclaredField(fieldName);
+        field.setAccessible(true);
+        return field.get(o);
+      } catch (Exception ex) {
+        throw new RuntimeException(ex);
+      }
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
 }


Mime
View raw message