lucene-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From markrmil...@apache.org
Subject lucene-solr git commit: SOLR-8575: Revert while investigated. (reverted from commit 482b40f841660820f633267a21e6df44aff55346)
Date Thu, 11 Feb 2016 13:36:37 GMT
Repository: lucene-solr
Updated Branches:
  refs/heads/branch_5x bdef47873 -> 68ba7a5e5


SOLR-8575: Revert while investigated. (reverted from commit 482b40f841660820f633267a21e6df44aff55346)


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/68ba7a5e
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/68ba7a5e
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/68ba7a5e

Branch: refs/heads/branch_5x
Commit: 68ba7a5e5275d4ad10e4e8f70e223f9b61d70b54
Parents: bdef478
Author: markrmiller <markrmiller@apache.org>
Authored: Thu Feb 11 08:36:30 2016 -0500
Committer: markrmiller <markrmiller@apache.org>
Committed: Thu Feb 11 08:36:30 2016 -0500

----------------------------------------------------------------------
 solr/CHANGES.txt                                |   3 -
 .../apache/solr/update/HdfsTransactionLog.java  |  22 +--
 .../java/org/apache/solr/update/UpdateLog.java  |   4 +-
 .../TlogReplayBufferedWhileIndexingTest.java    | 136 -------------------
 ...HdfsTlogReplayBufferedWhileIndexingTest.java |  63 ---------
 5 files changed, 13 insertions(+), 215 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68ba7a5e/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 71b35c8..f49aa0a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -283,9 +283,6 @@ Bug Fixes
 
 * SOLR-8640: CloudSolrClient does not send credentials for update request (noble, hoss)
 
-* SOLR-8575: Fix HDFSLogReader replay status numbers and a performance bug where we can reopen
-  FSDataInputStream too often. (Mark Miller, Patrick Dvorack)
-  
 * SOLR-8651: The commitWithin parameter is not passed on for deleteById in UpdateRequest in
   distributed queries (Jessica Cheng Mallet via Erick Erickson)
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68ba7a5e/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
index bff3486..3db65c6 100644
--- a/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
+++ b/solr/core/src/java/org/apache/solr/update/HdfsTransactionLog.java
@@ -390,18 +390,16 @@ public class HdfsTransactionLog extends TransactionLog {
       
       // we actually need a new reader to 
       // see if any data was added by the writer
-      if (pos >= sz) {
-        log.info("Read available inputstream data, opening new inputstream pos={} sz={}", pos, sz);
-        
-        synchronized (HdfsTransactionLog.this) {
-          sz = fos.size();
-        }
-        
+      if (fis.position() >= sz) {
         fis.close();
         tlogOutStream.hflush();
-
-        FSDataInputStream fdis = fs.open(tlogFile);
-        fis = new FSDataFastInputStream(fdis, pos);
+        try {
+          FSDataInputStream fdis = fs.open(tlogFile);
+          fis = new FSDataFastInputStream(fdis, pos);
+          sz = fs.getFileStatus(tlogFile).getLen();
+        } catch (IOException e) {
+          throw new RuntimeException(e);
+        }
       }
       
       if (pos == 0) {
@@ -448,7 +446,7 @@ public class HdfsTransactionLog extends TransactionLog {
     
     @Override
     public long currentSize() {
-      return fos.size();
+      return sz;
     }
 
   }
@@ -606,3 +604,5 @@ class FSDataFastInputStream extends FastInputStream {
     return "readFromStream="+readFromStream +" pos="+pos +" end="+end + " bufferPos="+getBufferPos() + " position="+position() ;
   }
 }
+
+

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68ba7a5e/solr/core/src/java/org/apache/solr/update/UpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/update/UpdateLog.java b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
index 7e3fc9f..2456c3e 100644
--- a/solr/core/src/java/org/apache/solr/update/UpdateLog.java
+++ b/solr/core/src/java/org/apache/solr/update/UpdateLog.java
@@ -1333,8 +1333,8 @@ public class UpdateLog implements PluginInfoInitialized {
                 loglog.info(
                         "log replay status {} active={} starting pos={} current pos={} current size={} % read={}",
                         translog, activeLog, recoveryInfo.positionOfStart, cpos, csize,
-                        Math.floor(cpos / (double) csize * 100.));
-
+                        Math.round(cpos / (double) csize * 100.));
+                
               }
             }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68ba7a5e/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java b/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java
deleted file mode 100644
index 5c03a60..0000000
--- a/solr/core/src/test/org/apache/solr/cloud/TlogReplayBufferedWhileIndexingTest.java
+++ /dev/null
@@ -1,136 +0,0 @@
-package org.apache.solr.cloud;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.lucene.util.LuceneTestCase.Nightly;
-import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
-import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.embedded.JettySolrRunner;
-import org.apache.solr.common.SolrInputDocument;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-@Slow
-@Nightly
-@SuppressSSL
-public class TlogReplayBufferedWhileIndexingTest extends AbstractFullDistribZkTestBase {
-
-  private List<StoppableIndexingThread> threads;
-  
-  public TlogReplayBufferedWhileIndexingTest() throws Exception {
-    super();
-    sliceCount = 1;
-    fixShardCount(2);
-    schemaString = "schema15.xml";      // we need a string id
-  }
-  
-  @BeforeClass
-  public static void beforeRestartWhileUpdatingTest() throws Exception {
-    System.setProperty("leaderVoteWait", "300000");
-    System.setProperty("solr.autoCommit.maxTime", "10000");
-    System.setProperty("solr.autoSoftCommit.maxTime", "3000");
-    if (System.getProperty("solr.hdfs.home") != null) useFactory("solr.StandardDirectoryFactory");
-  }
-  
-  @AfterClass
-  public static void afterRestartWhileUpdatingTest() {
-    System.clearProperty("leaderVoteWait");
-    System.clearProperty("solr.autoCommit.maxTime");
-    System.clearProperty("solr.autoSoftCommit.maxTime");
-  }
-
-  @Test
-  public void test() throws Exception {
-    handle.clear();
-    handle.put("timestamp", SKIPVAL);
-    
-    waitForRecoveriesToFinish(false);
-    
-    int numThreads = 1;
-    
-    threads = new ArrayList<>(numThreads);
-    
-    ArrayList<JettySolrRunner> allJetty = new ArrayList<>();
-    allJetty.addAll(jettys);
-    allJetty.remove(shardToLeaderJetty.get("shard1").jetty);
-    assert allJetty.size() == 1 : allJetty.size();
-    ChaosMonkey.stop(allJetty.get(0));
-    
-    StoppableIndexingThread indexThread;
-    for (int i = 0; i < numThreads; i++) {
-      indexThread = new StoppableIndexingThread(controlClient, cloudClient, Integer.toString(i), false, 50000, 1, false);
-      threads.add(indexThread);
-      indexThread.start();
-    }
-
-    Thread.sleep(2000);
-    
-    ChaosMonkey.start(allJetty.get(0));
-    
-    Thread.sleep(45000);
-  
-    waitForThingsToLevelOut(320);
-    
-    Thread.sleep(2000);
-    
-    waitForRecoveriesToFinish(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), false, true);
-    
-    for (StoppableIndexingThread thread : threads) {
-      thread.safeStop();
-      thread.safeStop();
-    }
-    
-    waitForThingsToLevelOut(30);
-
-    checkShardConsistency(false, false);
-  }
-
-  @Override
-  protected void indexDoc(SolrInputDocument doc) throws IOException,
-      SolrServerException {
-    cloudClient.add(doc);
-  }
-
-  
-  @Override
-  public void distribTearDown() throws Exception {
-    // make sure threads have been stopped...
-    if (threads != null) {
-      for (StoppableIndexingThread thread : threads) {
-        thread.safeStop();
-      }
-    }
-
-    super.distribTearDown();
-  }
-  
-  // skip the randoms - they can deadlock...
-  @Override
-  protected void indexr(Object... fields) throws Exception {
-    SolrInputDocument doc = new SolrInputDocument();
-    addFields(doc, fields);
-    addFields(doc, "rnd_b", true);
-    indexDoc(doc);
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/68ba7a5e/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTlogReplayBufferedWhileIndexingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTlogReplayBufferedWhileIndexingTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTlogReplayBufferedWhileIndexingTest.java
deleted file mode 100644
index 534bb90..0000000
--- a/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTlogReplayBufferedWhileIndexingTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package org.apache.solr.cloud.hdfs;
-
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.io.IOException;
-
-import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.lucene.util.LuceneTestCase.Slow;
-import org.apache.solr.cloud.TlogReplayBufferedWhileIndexingTest;
-import org.apache.solr.util.BadHdfsThreadsFilter;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-
-import com.carrotsearch.randomizedtesting.annotations.Nightly;
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
-
-@Slow
-@Nightly
-@ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
-})
-public class HdfsTlogReplayBufferedWhileIndexingTest extends TlogReplayBufferedWhileIndexingTest {
-  
-  public HdfsTlogReplayBufferedWhileIndexingTest() throws Exception {
-    super();
-  }
-
-  private static MiniDFSCluster dfsCluster;
-  
-  @BeforeClass
-  public static void setupClass() throws Exception {
-    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
-    System.setProperty("solr.hdfs.blockcache.blocksperbank", "2048");
-  }
-  
-  @AfterClass
-  public static void teardownClass() throws Exception {
-    HdfsTestUtil.teardownClass(dfsCluster);
-    dfsCluster = null;
-  }
-
-  
-  @Override
-  protected String getDataDir(String dataDir) throws IOException {
-    return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
-  }
-
-}


Mime
View raw message