Return-Path:
X-Original-To: apmail-lucene-commits-archive@www.apache.org
Delivered-To: apmail-lucene-commits-archive@www.apache.org
Received: from mail.apache.org (hermes.apache.org [140.211.11.3])
by minotaur.apache.org (Postfix) with SMTP id A621AC830
for <apmail-lucene-commits-archive@www.apache.org>;
Mon, 5 Jan 2015 14:28:32 +0000 (UTC)
Received: (qmail 70057 invoked by uid 500); 5 Jan 2015 14:28:33 -0000
Mailing-List: contact commits-help@lucene.apache.org; run by ezmlm
Precedence: bulk
List-Help: <mailto:commits-help@lucene.apache.org>
List-Unsubscribe: <mailto:commits-unsubscribe@lucene.apache.org>
List-Post: <mailto:commits@lucene.apache.org>
List-Id: <commits.lucene.apache.org>
Reply-To: dev@lucene.apache.org
Delivered-To: mailing list commits@lucene.apache.org
Received: (qmail 70047 invoked by uid 99); 5 Jan 2015 14:28:33 -0000
Received: from eris.apache.org (HELO hades.apache.org) (140.211.11.105)
by apache.org (qpsmtpd/0.29) with ESMTP; Mon, 05 Jan 2015 14:28:33 +0000
Received: from hades.apache.org (localhost [127.0.0.1])
by hades.apache.org (ASF Mail Server at hades.apache.org) with ESMTP id
05042AC0143;
Mon, 5 Jan 2015 14:28:30 +0000 (UTC)
Content-Type: text/plain; charset="utf-8"
MIME-Version: 1.0
Content-Transfer-Encoding: 7bit
Subject: svn commit: r1649532 [2/2] - in /lucene/dev/trunk: lucene/
lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/
lucene/core/src/java/org/apache/lucene/codecs/
lucene/core/src/java/org/apache/lucene/codecs/compressing/
lucene/core/src/java/org/ap...
Date: Mon, 05 Jan 2015 14:28:29 -0000
To: commits@lucene.apache.org
From: mikemccand@apache.org
X-Mailer: svnmailer-1.0.9
Message-Id: <20150105142830.05042AC0143@hades.apache.org>
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergeState.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergeState.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergeState.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/MergeState.java Mon Jan 5 14:28:28 2015
@@ -26,7 +26,6 @@ import org.apache.lucene.codecs.FieldsPr
import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader;
-import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.packed.PackedInts;
@@ -73,19 +72,11 @@ public class MergeState {
/** Max docs per reader */
public final int[] maxDocs;
- /** Holds the CheckAbort instance, which is invoked
- * periodically to see if the merge has been aborted. */
- public final CheckAbort checkAbort;
-
/** InfoStream for debugging messages. */
public final InfoStream infoStream;
- /** Counter used for periodic calls to checkAbort
- * @lucene.internal */
- public int checkAbortCount;
-
/** Sole constructor. */
- MergeState(List readers, SegmentInfo segmentInfo, InfoStream infoStream, CheckAbort checkAbort) throws IOException {
+ MergeState(List readers, SegmentInfo segmentInfo, InfoStream infoStream) throws IOException {
int numReaders = readers.size();
docMaps = new DocMap[numReaders];
@@ -148,7 +139,6 @@ public class MergeState {
this.segmentInfo = segmentInfo;
this.infoStream = infoStream;
- this.checkAbort = checkAbort;
setDocMaps(readers);
}
@@ -334,47 +324,6 @@ public class MergeState {
}
/**
- * Class for recording units of work when merging segments.
- */
- public static class CheckAbort {
- private double workCount;
- private final MergePolicy.OneMerge merge;
- private final Directory dir;
-
- /** Creates a #CheckAbort instance. */
- public CheckAbort(MergePolicy.OneMerge merge, Directory dir) {
- this.merge = merge;
- this.dir = dir;
- }
-
- /**
- * Records the fact that roughly units amount of work
- * have been done since this method was last called.
- * When adding time-consuming code into SegmentMerger,
- * you should test different values for units to ensure
- * that the time in between calls to merge.checkAborted
- * is up to ~ 1 second.
- */
- public void work(double units) throws MergePolicy.MergeAbortedException {
- workCount += units;
- if (workCount >= 10000.0) {
- merge.checkAborted(dir);
- workCount = 0;
- }
- }
-
- /** If you use this: IW.close(false) cannot abort your merge!
- * @lucene.internal */
- static final MergeState.CheckAbort NONE = new MergeState.CheckAbort(null, null) {
- @Override
- public void work(double units) {
- // do nothing
- }
- };
- }
-
-
- /**
* Remaps docids around deletes during merge
*/
public static abstract class DocMap {
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/NoMergeScheduler.java Mon Jan 5 14:28:28 2015
@@ -46,5 +46,4 @@ public final class NoMergeScheduler exte
public MergeScheduler clone() {
return this;
}
-
}
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/SegmentMerger.java Mon Jan 5 14:28:28 2015
@@ -49,7 +49,10 @@ final class SegmentMerger {
// note, just like in codec apis Directory 'dir' is NOT the same as segmentInfo.dir!!
SegmentMerger(List readers, SegmentInfo segmentInfo, InfoStream infoStream, Directory dir,
- MergeState.CheckAbort checkAbort, FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
+ FieldInfos.FieldNumbers fieldNumbers, IOContext context) throws IOException {
+ if (context.context != IOContext.Context.MERGE) {
+ throw new IllegalArgumentException("IOContext.context should be MERGE; got: " + context.context);
+ }
// validate incoming readers
for (LeafReader reader : readers) {
if ((reader instanceof SegmentReader) == false) {
@@ -59,7 +62,7 @@ final class SegmentMerger {
}
}
- mergeState = new MergeState(readers, segmentInfo, infoStream, checkAbort);
+ mergeState = new MergeState(readers, segmentInfo, infoStream);
directory = dir;
this.codec = segmentInfo.getCodec();
this.context = context;
@@ -81,12 +84,6 @@ final class SegmentMerger {
if (!shouldMerge()) {
throw new IllegalStateException("Merge would result in 0 document segment");
}
- // NOTE: it's important to add calls to
- // checkAbort.work(...) if you make any changes to this
- // method that will spend alot of time. The frequency
- // of this check impacts how long
- // IndexWriter.close(false) takes to actually stop the
- // background merge threads.
mergeFieldInfos();
long t0 = 0;
if (mergeState.infoStream.isEnabled("SM")) {
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java Mon Jan 5 14:28:28 2015
@@ -568,7 +568,7 @@ public class TieredMergePolicy extends M
final int numToMerge = end - maxSegmentCount + 1;
final OneMerge merge = new OneMerge(eligible.subList(end-numToMerge, end));
if (verbose(writer)) {
- message("add final merge=" + merge.segString(writer.getDirectory()), writer);
+ message("add final merge=" + merge.segString(), writer);
}
spec = new MergeSpecification();
spec.add(merge);
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/FilterDirectory.java Mon Jan 5 14:28:28 2015
@@ -23,7 +23,7 @@ import java.util.Collection;
/** Directory implementation that delegates calls to another directory.
* This class can be used to add limitations on top of an existing
* {@link Directory} implementation such as
- * {@link RateLimitedDirectoryWrapper rate limiting} or to add additional
+ * {@link NRTCachingDirectory} or to add additional
* sanity checks for tests. However, if you plan to write your own
* {@link Directory} implementation, you should consider extending directly
* {@link Directory} or {@link BaseDirectory} rather than try to reuse
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimitedIndexOutput.java Mon Jan 5 14:28:28 2015
@@ -24,7 +24,8 @@ import java.io.IOException;
*
* @lucene.internal
*/
-final class RateLimitedIndexOutput extends IndexOutput {
+
+public final class RateLimitedIndexOutput extends IndexOutput {
private final IndexOutput delegate;
private final RateLimiter rateLimiter;
@@ -36,7 +37,7 @@ final class RateLimitedIndexOutput exten
* which does volatile read. */
private long currentMinPauseCheckBytes;
- RateLimitedIndexOutput(final RateLimiter rateLimiter, final IndexOutput delegate) {
+ public RateLimitedIndexOutput(final RateLimiter rateLimiter, final IndexOutput delegate) {
super("RateLimitedIndexOutput(" + delegate + ")");
this.delegate = delegate;
this.rateLimiter = rateLimiter;
@@ -72,7 +73,7 @@ final class RateLimitedIndexOutput exten
delegate.writeBytes(b, offset, length);
}
- private void checkRate() {
+ private void checkRate() throws IOException {
if (bytesSinceLastPause > currentMinPauseCheckBytes) {
rateLimiter.pause(bytesSinceLastPause);
bytesSinceLastPause = 0;
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java Mon Jan 5 14:28:28 2015
@@ -17,6 +17,8 @@ package org.apache.lucene.store;
* limitations under the License.
*/
+import java.io.IOException;
+
import org.apache.lucene.util.ThreadInterruptedException;
/** Abstract base class to rate limit IO. Typically implementations are
@@ -27,14 +29,14 @@ import org.apache.lucene.util.ThreadInte
public abstract class RateLimiter {
/**
- * Sets an updated mb per second rate limit.
+ * Sets an updated MB per second rate limit.
*/
- public abstract void setMbPerSec(double mbPerSec);
+ public abstract void setMBPerSec(double mbPerSec);
/**
- * The current mb per second rate limit.
+ * The current MB per second rate limit.
*/
- public abstract double getMbPerSec();
+ public abstract double getMBPerSec();
/** Pauses, if necessary, to keep the instantaneous IO
* rate at or below the target.
@@ -43,7 +45,7 @@ public abstract class RateLimiter {
*
* @return the pause time in nano seconds
* */
- public abstract long pause(long bytes);
+ public abstract long pause(long bytes) throws IOException;
/** How many bytes caller should add up itself before invoking {@link #pause}. */
public abstract long getMinPauseCheckBytes();
@@ -65,7 +67,7 @@ public abstract class RateLimiter {
/** mbPerSec is the MB/sec max IO rate */
public SimpleRateLimiter(double mbPerSec) {
- setMbPerSec(mbPerSec);
+ setMBPerSec(mbPerSec);
lastNS = System.nanoTime();
}
@@ -73,7 +75,7 @@ public abstract class RateLimiter {
* Sets an updated mb per second rate limit.
*/
@Override
- public void setMbPerSec(double mbPerSec) {
+ public void setMBPerSec(double mbPerSec) {
this.mbPerSec = mbPerSec;
minPauseCheckBytes = (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024);
}
@@ -87,7 +89,7 @@ public abstract class RateLimiter {
* The current mb per second rate limit.
*/
@Override
- public double getMbPerSec() {
+ public double getMBPerSec() {
return this.mbPerSec;
}
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/PrintStreamInfoStream.java Mon Jan 5 14:28:28 2015
@@ -19,7 +19,10 @@ package org.apache.lucene.util;
import java.io.IOException;
import java.io.PrintStream;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
/**
@@ -32,6 +35,8 @@ public class PrintStreamInfoStream exten
// Used for printing messages
private static final AtomicInteger MESSAGE_ID = new AtomicInteger();
protected final int messageID;
+
+ private static final DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS", Locale.ROOT);
protected final PrintStream stream;
@@ -46,7 +51,7 @@ public class PrintStreamInfoStream exten
@Override
public void message(String component, String message) {
- stream.println(component + " " + messageID + " [" + new Date() + "; " + Thread.currentThread().getName() + "]: " + message);
+ stream.println(component + " " + messageID + " [" + dateFormat.format(new Date()) + "; " + Thread.currentThread().getName() + "]: " + message);
}
@Override
Modified: lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/StringHelper.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/StringHelper.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/StringHelper.java (original)
+++ lucene/dev/trunk/lucene/core/src/java/org/apache/lucene/util/StringHelper.java Mon Jan 5 14:28:28 2015
@@ -253,7 +253,7 @@ public abstract class StringHelper {
x0 = Long.parseLong(prop, 16);
x1 = x0;
} else {
- // "Ghetto randomess" from 3 different sources:
+ // Randomness from 3 different sources:
x0 = System.nanoTime();
x1 = StringHelper.class.hashCode() << 32;
StringBuilder sb = new StringBuilder();
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/TestMergeSchedulerExternal.java Mon Jan 5 14:28:28 2015
@@ -16,6 +16,7 @@ package org.apache.lucene;
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -56,21 +57,20 @@ public class TestMergeSchedulerExternal
@Override
protected MergeThread getMergeThread(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
MergeThread thread = new MyMergeThread(writer, merge);
- thread.setThreadPriority(getMergeThreadPriority());
thread.setDaemon(true);
thread.setName("MyMergeThread");
return thread;
}
@Override
- protected void handleMergeException(Throwable t) {
+ protected void handleMergeException(Directory dir, Throwable t) {
excCalled = true;
}
- @Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ @Override
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeCalled = true;
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
}
@@ -118,7 +118,7 @@ public class TestMergeSchedulerExternal
OneMerge merge = null;
while ((merge = writer.getNextMerge()) != null) {
if (VERBOSE) {
- System.out.println("executing merge " + merge.segString(writer.getDirectory()));
+ System.out.println("executing merge " + merge.segString());
}
writer.merge(merge);
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java Mon Jan 5 14:28:28 2015
@@ -293,7 +293,7 @@ public class TestConcurrentMergeSchedule
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
@Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
try {
// Stall all incoming merges until we see
// maxMergeCount:
@@ -312,7 +312,7 @@ public class TestConcurrentMergeSchedule
// Then sleep a bit to give a chance for the bug
// (too many pending merges) to appear:
Thread.sleep(20);
- super.doMerge(merge);
+ super.doMerge(writer, merge);
} finally {
runningMergeCount.decrementAndGet();
}
@@ -358,10 +358,10 @@ public class TestConcurrentMergeSchedule
}
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
totMergedBytes += merge.totalBytesSize();
atLeastOneMerge.countDown();
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
}
@@ -428,7 +428,7 @@ public class TestConcurrentMergeSchedule
final AtomicInteger runningMergeCount = new AtomicInteger();
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
int count = runningMergeCount.incrementAndGet();
// evil?
synchronized (this) {
@@ -437,7 +437,7 @@ public class TestConcurrentMergeSchedule
}
}
try {
- super.doMerge(merge);
+ super.doMerge(writer, merge);
} finally {
runningMergeCount.decrementAndGet();
}
@@ -489,7 +489,7 @@ public class TestConcurrentMergeSchedule
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
- protected void maybeStall() {
+ protected void maybeStall(IndexWriter writer) {
wasCalled.set(true);
}
});
@@ -514,14 +514,14 @@ public class TestConcurrentMergeSchedule
final CountDownLatch mergeFinish = new CountDownLatch(1);
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler() {
@Override
- protected void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ protected void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeStart.countDown();
try {
mergeFinish.await();
} catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
};
cms.setMaxMergesAndThreads(1, 1);
@@ -629,7 +629,7 @@ public class TestConcurrentMergeSchedule
ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
cms.setDefaultMaxMergesAndThreads(true);
assertEquals(1, cms.getMaxThreadCount());
- assertEquals(2, cms.getMaxMergeCount());
+ assertEquals(6, cms.getMaxMergeCount());
}
public void testNonSpinningDefaults() throws Exception {
@@ -637,7 +637,7 @@ public class TestConcurrentMergeSchedule
cms.setDefaultMaxMergesAndThreads(false);
int threadCount = cms.getMaxThreadCount();
assertTrue(threadCount >= 1);
- assertTrue(threadCount <= 3);
- assertEquals(cms.getMaxMergeCount(), 2+threadCount);
+ assertTrue(threadCount <= 4);
+ assertEquals(5+threadCount, cms.getMaxMergeCount());
}
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestDoc.java Mon Jan 5 14:28:28 2015
@@ -39,8 +39,8 @@ import org.apache.lucene.document.TextFi
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSLockFactory;
import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.InfoStream;
@@ -48,232 +48,231 @@ import org.apache.lucene.util.LuceneTest
import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.Version;
-
/** JUnit adaptation of an older test case DocTest. */
public class TestDoc extends LuceneTestCase {
- private Path workDir;
- private Path indexDir;
- private LinkedList files;
-
- /** Set the test case. This test case needs
- * a few text files created in the current working directory.
- */
- @Override
- public void setUp() throws Exception {
- super.setUp();
- if (VERBOSE) {
- System.out.println("TEST: setUp");
- }
- workDir = createTempDir("TestDoc");
- indexDir = createTempDir("testIndex");
+ private Path workDir;
+ private Path indexDir;
+ private LinkedList files;
+
+ /** Set the test case. This test case needs
+ * a few text files created in the current working directory.
+ */
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ if (VERBOSE) {
+ System.out.println("TEST: setUp");
+ }
+ workDir = createTempDir("TestDoc");
+ indexDir = createTempDir("testIndex");
- Directory directory = newFSDirectory(indexDir);
- directory.close();
+ Directory directory = newFSDirectory(indexDir);
+ directory.close();
- files = new LinkedList<>();
- files.add(createOutput("test.txt",
- "This is the first test file"
- ));
-
- files.add(createOutput("test2.txt",
- "This is the second test file"
- ));
- }
-
- private Path createOutput(String name, String text) throws IOException {
- Writer fw = null;
- PrintWriter pw = null;
-
- try {
- Path path = workDir.resolve(name);
- Files.deleteIfExists(path);
-
- fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
- pw = new PrintWriter(fw);
- pw.println(text);
- return path;
-
- } finally {
- if (pw != null) pw.close();
- if (fw != null) fw.close();
- }
+ files = new LinkedList<>();
+ files.add(createOutput("test.txt",
+ "This is the first test file"
+ ));
+
+ files.add(createOutput("test2.txt",
+ "This is the second test file"
+ ));
+ }
+
+ private Path createOutput(String name, String text) throws IOException {
+ Writer fw = null;
+ PrintWriter pw = null;
+
+ try {
+ Path path = workDir.resolve(name);
+ Files.deleteIfExists(path);
+
+ fw = new OutputStreamWriter(Files.newOutputStream(path), StandardCharsets.UTF_8);
+ pw = new PrintWriter(fw);
+ pw.println(text);
+ return path;
+
+ } finally {
+ if (pw != null) pw.close();
+ if (fw != null) fw.close();
}
+ }
- /** This test executes a number of merges and compares the contents of
- * the segments created when using compound file or not using one.
- *
- * TODO: the original test used to print the segment contents to System.out
- * for visual validation. To have the same effect, a new method
- * checkSegment(String name, ...) should be created that would
- * assert various things about the segment.
- */
- public void testIndexAndMerge() throws Exception {
- StringWriter sw = new StringWriter();
- PrintWriter out = new PrintWriter(sw, true);
+ /** This test executes a number of merges and compares the contents of
+ * the segments created when using compound file or not using one.
+ *
+ * TODO: the original test used to print the segment contents to System.out
+ * for visual validation. To have the same effect, a new method
+ * checkSegment(String name, ...) should be created that would
+ * assert various things about the segment.
+ */
+ public void testIndexAndMerge() throws Exception {
+ StringWriter sw = new StringWriter();
+ PrintWriter out = new PrintWriter(sw, true);
- Directory directory = newFSDirectory(indexDir);
+ Directory directory = newFSDirectory(indexDir);
- if (directory instanceof MockDirectoryWrapper) {
- // We create unreferenced files (we don't even write
- // a segments file):
- ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
- // this test itself deletes files (has no retry mechanism)
- ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
- }
+ if (directory instanceof MockDirectoryWrapper) {
+ // We create unreferenced files (we don't even write
+ // a segments file):
+ ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+ // this test itself deletes files (has no retry mechanism)
+ ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+ }
- IndexWriter writer = new IndexWriter(
- directory,
- newIndexWriterConfig(new MockAnalyzer(random())).
- setOpenMode(OpenMode.CREATE).
- setMaxBufferedDocs(-1).
- setMergePolicy(newLogMergePolicy(10))
- );
-
- SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
- printSegment(out, si1);
-
- SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
- printSegment(out, si2);
- writer.close();
+ IndexWriter writer = new IndexWriter(
+ directory,
+ newIndexWriterConfig(new MockAnalyzer(random())).
+ setOpenMode(OpenMode.CREATE).
+ setMaxBufferedDocs(-1).
+ setMergePolicy(newLogMergePolicy(10))
+ );
+
+ SegmentCommitInfo si1 = indexDoc(writer, "test.txt");
+ printSegment(out, si1);
+
+ SegmentCommitInfo si2 = indexDoc(writer, "test2.txt");
+ printSegment(out, si2);
+ writer.close();
- SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
- printSegment(out, siMerge);
+ SegmentCommitInfo siMerge = merge(directory, si1, si2, "_merge", false);
+ printSegment(out, siMerge);
- SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
- printSegment(out, siMerge2);
+ SegmentCommitInfo siMerge2 = merge(directory, si1, si2, "_merge2", false);
+ printSegment(out, siMerge2);
- SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
- printSegment(out, siMerge3);
+ SegmentCommitInfo siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", false);
+ printSegment(out, siMerge3);
- directory.close();
- out.close();
- sw.close();
-
- String multiFileOutput = sw.toString();
- //System.out.println(multiFileOutput);
-
- sw = new StringWriter();
- out = new PrintWriter(sw, true);
-
- directory = newFSDirectory(indexDir);
-
- if (directory instanceof MockDirectoryWrapper) {
- // We create unreferenced files (we don't even write
- // a segments file):
- ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
- // this test itself deletes files (has no retry mechanism)
- ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
- }
+ directory.close();
+ out.close();
+ sw.close();
+
+ String multiFileOutput = sw.toString();
+ //System.out.println(multiFileOutput);
+
+ sw = new StringWriter();
+ out = new PrintWriter(sw, true);
+
+ directory = newFSDirectory(indexDir);
+
+ if (directory instanceof MockDirectoryWrapper) {
+ // We create unreferenced files (we don't even write
+ // a segments file):
+ ((MockDirectoryWrapper) directory).setAssertNoUnrefencedFilesOnClose(false);
+ // this test itself deletes files (has no retry mechanism)
+ ((MockDirectoryWrapper) directory).setEnableVirusScanner(false);
+ }
- writer = new IndexWriter(
- directory,
- newIndexWriterConfig(new MockAnalyzer(random())).
- setOpenMode(OpenMode.CREATE).
- setMaxBufferedDocs(-1).
- setMergePolicy(newLogMergePolicy(10))
- );
-
- si1 = indexDoc(writer, "test.txt");
- printSegment(out, si1);
-
- si2 = indexDoc(writer, "test2.txt");
- printSegment(out, si2);
- writer.close();
+ writer = new IndexWriter(
+ directory,
+ newIndexWriterConfig(new MockAnalyzer(random())).
+ setOpenMode(OpenMode.CREATE).
+ setMaxBufferedDocs(-1).
+ setMergePolicy(newLogMergePolicy(10))
+ );
+
+ si1 = indexDoc(writer, "test.txt");
+ printSegment(out, si1);
+
+ si2 = indexDoc(writer, "test2.txt");
+ printSegment(out, si2);
+ writer.close();
- siMerge = merge(directory, si1, si2, "_merge", true);
- printSegment(out, siMerge);
+ siMerge = merge(directory, si1, si2, "_merge", true);
+ printSegment(out, siMerge);
- siMerge2 = merge(directory, si1, si2, "_merge2", true);
- printSegment(out, siMerge2);
+ siMerge2 = merge(directory, si1, si2, "_merge2", true);
+ printSegment(out, siMerge2);
- siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
- printSegment(out, siMerge3);
+ siMerge3 = merge(directory, siMerge, siMerge2, "_merge3", true);
+ printSegment(out, siMerge3);
- directory.close();
- out.close();
- sw.close();
- String singleFileOutput = sw.toString();
-
- assertEquals(multiFileOutput, singleFileOutput);
- }
-
- private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
- throws Exception
- {
- Path path = workDir.resolve(fileName);
- Document doc = new Document();
- InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
- doc.add(new TextField("contents", is));
- writer.addDocument(doc);
- writer.commit();
- is.close();
- return writer.newestSegment();
- }
-
-
- private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
- throws Exception {
- IOContext context = newIOContext(random());
- SegmentReader r1 = new SegmentReader(si1, context);
- SegmentReader r2 = new SegmentReader(si2, context);
-
- final Codec codec = Codec.getDefault();
- TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
- final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
-
- SegmentMerger merger = new SegmentMerger(Arrays.asList(r1, r2),
- si, InfoStream.getDefault(), trackingDir,
- MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), context);
-
- MergeState mergeState = merger.merge();
- r1.close();
- r2.close();;
- si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
+ directory.close();
+ out.close();
+ sw.close();
+ String singleFileOutput = sw.toString();
+
+ assertEquals(multiFileOutput, singleFileOutput);
+ }
+
+ private SegmentCommitInfo indexDoc(IndexWriter writer, String fileName)
+ throws Exception
+ {
+ Path path = workDir.resolve(fileName);
+ Document doc = new Document();
+ InputStreamReader is = new InputStreamReader(Files.newInputStream(path), StandardCharsets.UTF_8);
+ doc.add(new TextField("contents", is));
+ writer.addDocument(doc);
+ writer.commit();
+ is.close();
+ return writer.newestSegment();
+ }
+
+
+ private SegmentCommitInfo merge(Directory dir, SegmentCommitInfo si1, SegmentCommitInfo si2, String merged, boolean useCompoundFile)
+ throws Exception {
+ IOContext context = newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1)));
+ SegmentReader r1 = new SegmentReader(si1, context);
+ SegmentReader r2 = new SegmentReader(si2, context);
+
+ final Codec codec = Codec.getDefault();
+ TrackingDirectoryWrapper trackingDir = new TrackingDirectoryWrapper(si1.info.dir);
+ final SegmentInfo si = new SegmentInfo(si1.info.dir, Version.LATEST, merged, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
+
+ SegmentMerger merger = new SegmentMerger(Arrays.asList(r1, r2),
+ si, InfoStream.getDefault(), trackingDir,
+ new FieldInfos.FieldNumbers(), context);
+
+ MergeState mergeState = merger.merge();
+ r1.close();
+ r2.close();;
+ si.setFiles(new HashSet<>(trackingDir.getCreatedFiles()));
- if (useCompoundFile) {
- Collection filesToDelete = IndexWriter.createCompoundFile(InfoStream.getDefault(), dir, MergeState.CheckAbort.NONE, si, newIOContext(random()));
- si.setUseCompoundFile(true);
- for (final String fileToDelete : filesToDelete) {
- si1.info.dir.deleteFile(fileToDelete);
- }
+ if (useCompoundFile) {
+ Collection filesToDelete = IndexWriter.createCompoundFile(InfoStream.getDefault(), dir, si, newIOContext(random()));
+ si.setUseCompoundFile(true);
+ for (final String fileToDelete : filesToDelete) {
+ si1.info.dir.deleteFile(fileToDelete);
}
+ }
- return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
- }
+ return new SegmentCommitInfo(si, 0, -1L, -1L, -1L);
+ }
- private void printSegment(PrintWriter out, SegmentCommitInfo si)
- throws Exception {
- SegmentReader reader = new SegmentReader(si, newIOContext(random()));
-
- for (int i = 0; i < reader.numDocs(); i++)
- out.println(reader.document(i));
-
- Fields fields = reader.fields();
- for (String field : fields) {
- Terms terms = fields.terms(field);
- assertNotNull(terms);
- TermsEnum tis = terms.iterator(null);
- while(tis.next() != null) {
-
- out.print(" term=" + field + ":" + tis.term());
- out.println(" DF=" + tis.docFreq());
-
- DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
-
- while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
- out.print(" doc=" + positions.docID());
- out.print(" TF=" + positions.freq());
- out.print(" pos=");
- out.print(positions.nextPosition());
- for (int j = 1; j < positions.freq(); j++)
- out.print("," + positions.nextPosition());
- out.println("");
- }
+ private void printSegment(PrintWriter out, SegmentCommitInfo si)
+ throws Exception {
+ SegmentReader reader = new SegmentReader(si, newIOContext(random()));
+
+ for (int i = 0; i < reader.numDocs(); i++)
+ out.println(reader.document(i));
+
+ Fields fields = reader.fields();
+ for (String field : fields) {
+ Terms terms = fields.terms(field);
+ assertNotNull(terms);
+ TermsEnum tis = terms.iterator(null);
+ while(tis.next() != null) {
+
+ out.print(" term=" + field + ":" + tis.term());
+ out.println(" DF=" + tis.docFreq());
+
+ DocsAndPositionsEnum positions = tis.docsAndPositions(reader.getLiveDocs(), null);
+
+ while (positions.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+ out.print(" doc=" + positions.docID());
+ out.print(" TF=" + positions.freq());
+ out.print(" pos=");
+ out.print(positions.nextPosition());
+ for (int j = 1; j < positions.freq(); j++)
+ out.print("," + positions.nextPosition());
+ out.println("");
}
}
- reader.close();
}
+ reader.close();
+ }
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java Mon Jan 5 14:28:28 2015
@@ -432,7 +432,7 @@ public class TestIndexFileDeleter extend
if (ms instanceof ConcurrentMergeScheduler) {
final ConcurrentMergeScheduler suppressFakeFail = new ConcurrentMergeScheduler() {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
// suppress only FakeIOException:
if (exc instanceof RuntimeException && exc.getMessage().equals("fake fail")) {
// ok to ignore
@@ -440,13 +440,12 @@ public class TestIndexFileDeleter extend
&& exc.getCause() != null && "fake fail".equals(exc.getCause().getMessage())) {
// also ok to ignore
} else {
- super.handleMergeException(exc);
+ super.handleMergeException(dir, exc);
}
}
};
final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
suppressFakeFail.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
- suppressFakeFail.setMergeThreadPriority(cms.getMergeThreadPriority());
iwc.setMergeScheduler(suppressFakeFail);
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java Mon Jan 5 14:28:28 2015
@@ -2563,7 +2563,7 @@ public class TestIndexWriter extends Luc
iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
- public void doMerge(MergePolicy.OneMerge merge) throws IOException {
+ public void doMerge(IndexWriter writer, MergePolicy.OneMerge merge) throws IOException {
mergeStarted.countDown();
try {
closeStarted.await();
@@ -2571,7 +2571,7 @@ public class TestIndexWriter extends Luc
Thread.currentThread().interrupt();
throw new RuntimeException(ie);
}
- super.doMerge(merge);
+ super.doMerge(writer, merge);
}
@Override
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java Mon Jan 5 14:28:28 2015
@@ -57,15 +57,15 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.MockDirectoryWrapper;
+import org.apache.lucene.store.MockDirectoryWrapper.FakeIOException;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
+import org.apache.lucene.util.TestUtil;
@SuppressCodecs("SimpleText") // too slow here
public class TestIndexWriterExceptions extends LuceneTestCase {
@@ -1951,16 +1951,15 @@ public class TestIndexWriterExceptions e
if (ms instanceof ConcurrentMergeScheduler) {
final ConcurrentMergeScheduler suppressFakeIOE = new ConcurrentMergeScheduler() {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
// suppress only FakeIOException:
if (!(exc instanceof FakeIOException)) {
- super.handleMergeException(exc);
+ super.handleMergeException(dir, exc);
}
}
};
final ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) ms;
suppressFakeIOE.setMaxMergesAndThreads(cms.getMaxMergeCount(), cms.getMaxThreadCount());
- suppressFakeIOE.setMergeThreadPriority(cms.getMergeThreadPriority());
iwc.setMergeScheduler(suppressFakeIOE);
}
Modified: lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java (original)
+++ lucene/dev/trunk/lucene/core/src/test/org/apache/lucene/index/TestSegmentMerger.java Mon Jan 5 14:28:28 2015
@@ -25,6 +25,8 @@ import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Document;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.InfoStream;
@@ -83,8 +85,9 @@ public class TestSegmentMerger extends L
final SegmentInfo si = new SegmentInfo(mergedDir, Version.LATEST, mergedSegment, -1, false, codec, null, StringHelper.randomId(), new HashMap<>());
SegmentMerger merger = new SegmentMerger(Arrays.asList(reader1, reader2),
- si, InfoStream.getDefault(), mergedDir,
- MergeState.CheckAbort.NONE, new FieldInfos.FieldNumbers(), newIOContext(random()));
+ si, InfoStream.getDefault(), mergedDir,
+ new FieldInfos.FieldNumbers(),
+ newIOContext(random(), new IOContext(new MergeInfo(-1, -1, false, -1))));
MergeState mergeState = merger.merge();
int docsMerged = mergeState.segmentInfo.getDocCount();
assertTrue(docsMerged == 2);
Modified: lucene/dev/trunk/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java (original)
+++ lucene/dev/trunk/lucene/sandbox/src/test/org/apache/lucene/codecs/idversion/TestIDVersionPostingsFormat.java Mon Jan 5 14:28:28 2015
@@ -29,8 +29,8 @@ import java.util.concurrent.ConcurrentHa
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicLong;
-import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.Analyzer.TokenStreamComponents;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
@@ -387,7 +387,7 @@ public class TestIDVersionPostingsFormat
if (ms instanceof ConcurrentMergeScheduler) {
iwc.setMergeScheduler(new ConcurrentMergeScheduler() {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
assertTrue(exc instanceof IllegalArgumentException);
}
});
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/codecs/cranky/CrankyCompoundFormat.java Mon Jan 5 14:28:28 2015
@@ -22,7 +22,6 @@ import java.util.Collection;
import java.util.Random;
import org.apache.lucene.codecs.CompoundFormat;
-import org.apache.lucene.index.MergeState.CheckAbort;
import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
@@ -42,11 +41,11 @@ class CrankyCompoundFormat extends Compo
}
@Override
- public void write(Directory dir, SegmentInfo si, Collection files, CheckAbort checkAbort, IOContext context) throws IOException {
+ public void write(Directory dir, SegmentInfo si, Collection files, IOContext context) throws IOException {
if (random.nextInt(100) == 0) {
throw new IOException("Fake IOException from CompoundFormat.write()");
}
- delegate.write(dir, si, files, checkAbort, context);
+ delegate.write(dir, si, files, context);
}
@Override
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/BaseCompoundFormatTestCase.java Mon Jan 5 14:28:28 2015
@@ -55,7 +55,7 @@ public abstract class BaseCompoundFormat
Directory dir = newDirectory();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
assertEquals(0, cfs.listAll().length);
cfs.close();
@@ -74,7 +74,7 @@ public abstract class BaseCompoundFormat
createSequenceFile(dir, testfile, (byte) 0, data[i]);
SegmentInfo si = newSegmentInfo(dir, "_" + i);
- si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
IndexInput expected = dir.openInput(testfile, newIOContext(random()));
@@ -98,7 +98,7 @@ public abstract class BaseCompoundFormat
createSequenceFile(dir, files[1], (byte) 0, 114);
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
for (String file : files) {
@@ -124,7 +124,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
assertEquals(1, cfs.listAll().length);
cfs.close();
@@ -149,7 +149,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, myContext);
+ si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), myContext);
dir.close();
}
@@ -168,7 +168,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), MergeState.CheckAbort.NONE, context);
+ si.getCodec().compoundFormat().write(dir, si, Collections.singleton(testfile), context);
dir.close();
}
@@ -218,7 +218,7 @@ public abstract class BaseCompoundFormat
Directory dir = newDirectory();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
try {
cfs.createOutput("bogus", IOContext.DEFAULT);
@@ -240,7 +240,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
try {
cfs.deleteFile(testfile);
@@ -262,7 +262,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
try {
cfs.renameFile(testfile, "bogus");
@@ -284,7 +284,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
try {
cfs.sync(Collections.singleton(testfile));
@@ -306,7 +306,7 @@ public abstract class BaseCompoundFormat
out.close();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Collections.emptyList(), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
try {
cfs.makeLock("foobar");
@@ -345,7 +345,7 @@ public abstract class BaseCompoundFormat
String files[] = dir.listAll();
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Arrays.asList(files), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
for (int i = 0; i < files.length; i++) {
@@ -376,7 +376,7 @@ public abstract class BaseCompoundFormat
assertEquals(0, dir.getFileHandleCount());
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, Arrays.asList(dir.listAll()), MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, Arrays.asList(dir.listAll()), IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
final IndexInput[] ins = new IndexInput[FILE_COUNT];
@@ -729,7 +729,7 @@ public abstract class BaseCompoundFormat
}
SegmentInfo si = newSegmentInfo(dir, "_123");
- si.getCodec().compoundFormat().write(dir, si, files, MergeState.CheckAbort.NONE, IOContext.DEFAULT);
+ si.getCodec().compoundFormat().write(dir, si, files, IOContext.DEFAULT);
Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
return cfs;
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/index/SuppressingConcurrentMergeScheduler.java Mon Jan 5 14:28:28 2015
@@ -1,5 +1,7 @@
package org.apache.lucene.index;
+import org.apache.lucene.store.Directory;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -17,22 +19,17 @@ package org.apache.lucene.index;
* limitations under the License.
*/
-import java.io.IOException;
-
-import org.apache.lucene.store.AlreadyClosedException;
-import org.apache.lucene.util.IOUtils;
-
/** A {@link ConcurrentMergeScheduler} that ignores AlreadyClosedException. */
public abstract class SuppressingConcurrentMergeScheduler extends ConcurrentMergeScheduler {
@Override
- protected void handleMergeException(Throwable exc) {
+ protected void handleMergeException(Directory dir, Throwable exc) {
while (true) {
if (isOK(exc)) {
return;
}
exc = exc.getCause();
if (exc == null) {
- super.handleMergeException(exc);
+ super.handleMergeException(dir, exc);
}
}
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/store/MockDirectoryWrapper.java Mon Jan 5 14:28:28 2015
@@ -611,7 +611,7 @@ public class MockDirectoryWrapper extend
// throttling REALLY slows down tests, so don't do it very often for SOMETIMES.
if (throttling == Throttling.ALWAYS ||
- (throttling == Throttling.SOMETIMES && randomState.nextInt(200) == 0) && !(in instanceof RateLimitedDirectoryWrapper)) {
+ (throttling == Throttling.SOMETIMES && randomState.nextInt(200) == 0)) {
if (LuceneTestCase.VERBOSE) {
System.out.println("MockDirectoryWrapper: throttling indexOutput (" + name + ")");
}
Modified: lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java (original)
+++ lucene/dev/trunk/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java Mon Jan 5 14:28:28 2015
@@ -123,13 +123,11 @@ import org.apache.lucene.store.FSDirecto
import org.apache.lucene.store.FSLockFactory;
import org.apache.lucene.store.FlushInfo;
import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.MergeInfo;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.MockDirectoryWrapper.Throttling;
import org.apache.lucene.store.NRTCachingDirectory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.util.automaton.AutomatonTestUtil;
import org.apache.lucene.util.automaton.CompiledAutomaton;
import org.apache.lucene.util.automaton.RegExp;
@@ -914,13 +912,17 @@ public abstract class LuceneTestCase ext
} else {
cms = new ConcurrentMergeScheduler() {
@Override
- protected synchronized void maybeStall() {
+ protected synchronized void maybeStall(IndexWriter writer) {
}
};
}
int maxThreadCount = TestUtil.nextInt(r, 1, 4);
int maxMergeCount = TestUtil.nextInt(r, maxThreadCount, maxThreadCount + 4);
cms.setMaxMergesAndThreads(maxMergeCount, maxThreadCount);
+ if (random().nextBoolean()) {
+ cms.disableAutoIOThrottle();
+ }
+ cms.setForceMergeMBPerSec(10 + 10*random().nextDouble());
c.setMergeScheduler(cms);
} else {
// Always use consistent settings, else CMS's dynamic (SSD or not)
@@ -1347,27 +1349,6 @@ public abstract class LuceneTestCase ext
directory = new NRTCachingDirectory(directory, random.nextDouble(), random.nextDouble());
}
- if (TEST_NIGHTLY && rarely(random) && !bare) {
- final double maxMBPerSec = TestUtil.nextInt(random, 20, 40);
- if (LuceneTestCase.VERBOSE) {
- System.out.println("LuceneTestCase: will rate limit output IndexOutput to " + maxMBPerSec + " MB/sec");
- }
- final RateLimitedDirectoryWrapper rateLimitedDirectoryWrapper = new RateLimitedDirectoryWrapper(directory);
- switch (random.nextInt(10)) {
- case 3: // sometimes rate limit on flush
- rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.FLUSH);
- break;
- case 2: // sometimes rate limit flush & merge
- rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.FLUSH);
- rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.MERGE);
- break;
- default:
- rateLimitedDirectoryWrapper.setMaxWriteMBPerSec(maxMBPerSec, Context.MERGE);
- }
- directory = rateLimitedDirectoryWrapper;
-
- }
-
if (bare) {
BaseDirectoryWrapper base = new BaseDirectoryWrapper(directory);
closeAfterSuite(new CloseableDirectory(base, suiteFailureMarker));
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java Mon Jan 5 14:28:28 2015
@@ -36,7 +36,6 @@ import org.apache.lucene.store.LockFacto
import org.apache.lucene.store.NRTCachingDirectory;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.store.SingleInstanceLockFactory;
import org.apache.lucene.util.IOUtils;
@@ -350,7 +349,6 @@ public abstract class CachingDirectoryFa
directory = create(fullPath, createLockFactory(rawLockType), dirContext);
boolean success = false;
try {
- directory = rateLimit(directory);
CacheValue newCacheValue = new CacheValue(fullPath, directory);
byDirectoryCache.put(directory, newCacheValue);
byPathCache.put(fullPath, newCacheValue);
@@ -370,25 +368,6 @@ public abstract class CachingDirectoryFa
}
}
- private Directory rateLimit(Directory directory) {
- if (maxWriteMBPerSecDefault != null || maxWriteMBPerSecFlush != null || maxWriteMBPerSecMerge != null || maxWriteMBPerSecRead != null) {
- directory = new RateLimitedDirectoryWrapper(directory);
- if (maxWriteMBPerSecDefault != null) {
- ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecDefault, Context.DEFAULT);
- }
- if (maxWriteMBPerSecFlush != null) {
- ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecFlush, Context.FLUSH);
- }
- if (maxWriteMBPerSecMerge != null) {
- ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecMerge, Context.MERGE);
- }
- if (maxWriteMBPerSecRead != null) {
- ((RateLimitedDirectoryWrapper)directory).setMaxWriteMBPerSec(maxWriteMBPerSecRead, Context.READ);
- }
- }
- return directory;
- }
-
/*
* (non-Javadoc)
*
Modified: lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java (original)
+++ lucene/dev/trunk/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java Mon Jan 5 14:28:28 2015
@@ -28,7 +28,6 @@ import org.apache.lucene.store.LockFacto
import org.apache.lucene.store.NRTCachingDirectory;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.store.SimpleFSLockFactory;
import org.apache.lucene.store.SingleInstanceLockFactory;
import org.apache.solr.common.SolrException;
@@ -113,8 +112,7 @@ public class StandardDirectoryFactory ex
* carefully - some Directory wrappers will
* cache files for example.
*
- * This implementation works with two wrappers:
- * NRTCachingDirectory and RateLimitedDirectoryWrapper.
+ * This implementation works with NRTCachingDirectory.
*
* You should first {@link Directory#sync(java.util.Collection)} any file that will be
* moved or avoid cached files through settings.
@@ -143,13 +141,11 @@ public class StandardDirectoryFactory ex
super.move(fromDir, toDir, fileName, ioContext);
}
- // special hack to work with NRTCachingDirectory and RateLimitedDirectoryWrapper
+ // special hack to work with NRTCachingDirectory
private Directory getBaseDir(Directory dir) {
Directory baseDir;
if (dir instanceof NRTCachingDirectory) {
baseDir = ((NRTCachingDirectory)dir).getDelegate();
- } else if (dir instanceof RateLimitedDirectoryWrapper) {
- baseDir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
} else {
baseDir = dir;
}
Modified: lucene/dev/trunk/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java (original)
+++ lucene/dev/trunk/solr/core/src/test/org/apache/solr/BasicFunctionalityTest.java Mon Jan 5 14:28:28 2015
@@ -37,7 +37,6 @@ import org.apache.lucene.index.StoredDoc
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext.Context;
import org.apache.lucene.store.MockDirectoryWrapper;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.util.English;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
Modified: lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java (original)
+++ lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockDirectoryFactory.java Mon Jan 5 14:28:28 2015
@@ -25,7 +25,6 @@ import org.apache.lucene.store.LockFacto
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.NRTCachingDirectory;
import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
@@ -86,9 +85,6 @@ public class MockDirectoryFactory extend
if (dir instanceof NRTCachingDirectory) {
cdir = ((NRTCachingDirectory)dir).getDelegate();
}
- if (cdir instanceof RateLimitedDirectoryWrapper) {
- cdir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
- }
if (cdir instanceof TrackingDirectoryWrapper) {
cdir = ((TrackingDirectoryWrapper)dir).getDelegate();
}
Modified: lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java?rev=1649532&r1=1649531&r2=1649532&view=diff
==============================================================================
--- lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java (original)
+++ lucene/dev/trunk/solr/test-framework/src/java/org/apache/solr/core/MockFSDirectoryFactory.java Mon Jan 5 14:28:28 2015
@@ -25,7 +25,6 @@ import org.apache.lucene.store.Directory
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.store.NRTCachingDirectory;
-import org.apache.lucene.store.RateLimitedDirectoryWrapper;
import org.apache.lucene.store.TrackingDirectoryWrapper;
import org.apache.lucene.util.LuceneTestCase;
@@ -69,9 +68,6 @@ public class MockFSDirectoryFactory exte
if (dir instanceof NRTCachingDirectory) {
cdir = ((NRTCachingDirectory)dir).getDelegate();
}
- if (cdir instanceof RateLimitedDirectoryWrapper) {
- cdir = ((RateLimitedDirectoryWrapper)dir).getDelegate();
- }
if (cdir instanceof TrackingDirectoryWrapper) {
cdir = ((TrackingDirectoryWrapper)dir).getDelegate();
}