From: stack@apache.org
To: commits@hbase.apache.org
Reply-To: dev@hbase.apache.org
Subject: svn commit: r1507107 - /hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
Date: Thu, 25 Jul 2013 20:03:35 -0000
Message-Id: <20130725200335.5959D23888A6@eris.apache.org>

Author: stack
Date: Thu Jul 25 20:03:34 2013
New Revision: 1507107

URL: http://svn.apache.org/r1507107
Log:
HBASE-9022 TestHLogSplit.testIOEOnOutputThread fails; MORE DEBUGGING

Modified:
    hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java

Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java?rev=1507107&r1=1507106&r2=1507107&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLogSplitter.java Thu Jul 25 20:03:34 2013
@@ -161,7 +161,7 @@ public class HLogSplitter {
         128*1024*1024));
     this.minBatchSize = conf.getInt("hbase.regionserver.wal.logreplay.batch.size", 512);
-    this.distributedLogReplay = this.conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY,
+    this.distributedLogReplay = this.conf.getBoolean(HConstants.DISTRIBUTED_LOG_REPLAY_KEY,
       HConstants.DEFAULT_DISTRIBUTED_LOG_REPLAY_CONFIG);
     this.numWriterThreads = conf.getInt("hbase.regionserver.hlog.splitlog.writer.threads", 3);
@@ -1026,12 +1026,14 @@ public class HLogSplitter {
           return t;
         }
       });
-    CompletionService completionService = new ExecutorCompletionService(
-        closeThreadPool);
+    CompletionService completionService =
+      new ExecutorCompletionService(closeThreadPool);
     for (final Map.Entry writersEntry : writers.entrySet()) {
+      LOG.debug("Submitting close of " + ((WriterAndPath)writersEntry.getValue()).p);
       completionService.submit(new Callable() {
         public Void call() throws Exception {
           WriterAndPath wap = (WriterAndPath) writersEntry.getValue();
+          LOG.debug("Closing " + wap.p);
           try {
             wap.w.close();
           } catch (IOException ioe) {
@@ -1039,7 +1041,7 @@ public class HLogSplitter {
             thrown.add(ioe);
             return null;
           }
-          LOG.info("Closed path " + wap.p + " (wrote " + wap.editsWritten + " edits in "
+          LOG.info("Closed wap " + wap.p + " (wrote " + wap.editsWritten + " edits in "
              + (wap.nanosSpent / 1000 / 1000) + "ms)");
           if (wap.editsWritten == 0) {
@@ -1147,7 +1149,7 @@ public class HLogSplitter {
           thrown.add(ioe);
           continue;
         }
-        LOG.info("Closed path " + wap.p + " (wrote " + wap.editsWritten + " edits in "
+        LOG.info("Closed log " + wap.p + " (wrote " + wap.editsWritten + " edits in "
            + (wap.nanosSpent / 1000 / 1000) + "ms)");
       }
     }
@@ -1296,19 +1298,19 @@ public class HLogSplitter {
     private long waitRegionOnlineTimeOut;
     private final Set recoveredRegions = Collections.synchronizedSet(new HashSet());
-    private final Map writers =
+    private final Map writers =
        new ConcurrentHashMap();
     // online encoded region name -> region location map
-    private final Map onlineRegions =
+    private final Map onlineRegions =
        new ConcurrentHashMap();
     private Map tableNameToHConnectionMap = Collections
        .synchronizedMap(new TreeMap(Bytes.BYTES_COMPARATOR));
     /**
-     * Map key -> value layout
+     * Map key -> value layout
      * : -> Queue
      */
-    private Map>> serverToBufferQueueMap =
+    private Map>> serverToBufferQueueMap =
        new ConcurrentHashMap>>();
     private List thrown = new ArrayList();
@@ -1321,7 +1323,7 @@ public class HLogSplitter {
     public LogReplayOutputSink(int numWriters) {
       super(numWriters);
-      this.waitRegionOnlineTimeOut = conf.getInt("hbase.splitlog.manager.timeout",
+      this.waitRegionOnlineTimeOut = conf.getInt("hbase.splitlog.manager.timeout",
        SplitLogManager.DEFAULT_TIMEOUT);
       this.logRecoveredEditsOutputSink = new LogRecoveredEditsOutputSink(numWriters);
       this.logRecoveredEditsOutputSink.setReporter(reporter);
@@ -1333,7 +1335,7 @@ public class HLogSplitter {
         LOG.warn("got an empty buffer, skipping");
         return;
       }
-
+
       // check if current region in a disabling or disabled table
       if (disablingOrDisabledTables.contains(Bytes.toString(buffer.tableName))) {
         // need fall back to old way
@@ -1433,7 +1435,7 @@ public class HLogSplitter {
         }
         try {
-          loc = locateRegionAndRefreshLastFlushedSequenceId(hconn, table, kv.getRow(),
+          loc = locateRegionAndRefreshLastFlushedSequenceId(hconn, table, kv.getRow(),
            encodeRegionNameStr);
         } catch (TableNotFoundException ex) {
           // table has been deleted so skip edits of the table
@@ -1493,7 +1495,7 @@ public class HLogSplitter {
         // skip the edit
         if(needSkip) continue;
-
+
         // add the last row
         if (preRow != null && lastAddedRow != preRow) {
           synchronized (serverToBufferQueueMap) {
@@ -1554,7 +1556,7 @@ public class HLogSplitter {
         }
         regionMaxSeqIdInStores.put(loc.getRegionInfo().getEncodedName(), storeIds);
       }
-
+
       if (cachedLastFlushedSequenceId == null || lastFlushedSequenceId > cachedLastFlushedSequenceId) {
         lastFlushedSequenceIds.put(loc.getRegionInfo().getEncodedName(), lastFlushedSequenceId);
@@ -1599,7 +1601,7 @@ public class HLogSplitter {
      */
     private HRegionLocation waitUntilRegionOnline(HRegionLocation loc, byte[] row, final long timeout)
-        throws IOException {
+        throws IOException {
       final long endTime = EnvironmentEdgeManager.currentTimeMillis() + timeout;
       final long pause = conf.getLong(HConstants.HBASE_CLIENT_PAUSE,
         HConstants.DEFAULT_HBASE_CLIENT_PAUSE);
@@ -1631,12 +1633,12 @@ public class HLogSplitter {
           Thread.sleep(expectedSleep);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
-          throw new IOException("Interrupted when waiting regon " +
+          throw new IOException("Interrupted when waiting regon " +
              loc.getRegionInfo().getEncodedName() + " online.", e);
        }
        tries++;
      }
-
+
      throw new IOException("Timeout when waiting region " + loc.getRegionInfo().getEncodedName()
          + " online for " + timeout + " milliseconds.", cause);
    }
@@ -1802,7 +1804,7 @@ public class HLogSplitter {
      }
      return hconn;
    }
-
+
    private String getTableFromLocationStr(String loc) {
      /**
       * location key is in format #
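
For readers following the change: the hunk at @@ -1026 instruments the parallel close of the per-region writers. Each close is submitted as a Callable to an ExecutorCompletionService backed by a small thread pool, IOExceptions are collected rather than thrown immediately, and the two added LOG.debug lines record which path is being submitted and which one is actually closing. Below is a minimal, self-contained sketch of that pattern only; it is not the HBase code. WriterHolder, the dummy paths, the pool size, and the System.out calls (standing in for LOG.debug/LOG.info) are assumptions made for the sketch.

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class ParallelCloseSketch {

  /** Hypothetical stand-in for HBase's WriterAndPath: a writer plus the path it writes to. */
  static class WriterHolder {
    final String path;        // plays the role of wap.p
    final Closeable writer;   // plays the role of wap.w
    WriterHolder(String path, Closeable writer) {
      this.path = path;
      this.writer = writer;
    }
  }

  public static void main(String[] args) throws Exception {
    // Two dummy writers; the real splitter holds one per region's recovered-edits output.
    Map<String, WriterHolder> writers = new TreeMap<String, WriterHolder>();
    writers.put("region-a", new WriterHolder("/tmp/recovered.edits/region-a", () -> {}));
    writers.put("region-b", new WriterHolder("/tmp/recovered.edits/region-b", () -> {}));

    final List<IOException> thrown =
        Collections.synchronizedList(new ArrayList<IOException>());
    ExecutorService closeThreadPool = Executors.newFixedThreadPool(2);
    CompletionService<Void> completionService =
        new ExecutorCompletionService<Void>(closeThreadPool);

    for (final Map.Entry<String, WriterHolder> entry : writers.entrySet()) {
      // First debug line added by the commit: records that the close was submitted.
      System.out.println("Submitting close of " + entry.getValue().path);
      completionService.submit(new Callable<Void>() {
        @Override
        public Void call() {
          WriterHolder wap = entry.getValue();
          // Second debug line added by the commit: records that the close actually started.
          System.out.println("Closing " + wap.path);
          try {
            wap.writer.close();
          } catch (IOException ioe) {
            thrown.add(ioe);   // collect and keep closing the remaining writers
            return null;
          }
          System.out.println("Closed " + wap.path);
          return null;
        }
      });
    }

    // Wait for every submitted close to finish before shutting the pool down.
    for (int i = 0; i < writers.size(); i++) {
      completionService.take().get();
    }
    closeThreadPool.shutdown();

    if (!thrown.isEmpty()) {
      throw thrown.get(0);     // surface the first collected close failure
    }
  }
}

With the closes submitted per writer like this, a hung or failing close in TestHLogSplit.testIOEOnOutputThread can be tied to a concrete recovered-edits path in the log output, which appears to be the point of the "MORE DEBUGGING" in the commit message.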