incubator-blur-commits mailing list archives

From amccu...@apache.org
Subject [1/2] git commit: Fixing BLUR-427
Date Tue, 07 Apr 2015 12:39:09 GMT
Repository: incubator-blur
Updated Branches:
  refs/heads/master ba12672c5 -> 96592cc32


Fixing BLUR-427
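
This change removes the memory-resident addressInstances, ordinalInstances and ordIndexInstances caches from DiskDocValuesProducer. Each call to getVariableBinary, getSorted and getSortedSet now seeks a cloned IndexInput and constructs its BlockPackedReader or MonotonicBlockPackedReader directly, with the construction wrapped in a Blur trace span (Trace.trace(...) / Tracer.done()) so its cost shows up in tracing.

A minimal sketch of that try/finally tracing pattern, taken outside the codec, is below. It assumes only the Trace and Tracer calls that appear in the diff; the buildReader() helper is a hypothetical stand-in for the seek-plus-constructor work and is not part of this commit.

    import org.apache.blur.trace.Trace;
    import org.apache.blur.trace.Tracer;

    public class TraceSpanSketch {
      public static void main(String[] args) {
        // Open a named span around the expensive setup work, mirroring the
        // spans added in DiskDocValuesProducer.
        Tracer tracer = Trace.trace("getSorted - BlockPackedReader - create");
        try {
          buildReader(); // hypothetical stand-in for data.seek(...) + new BlockPackedReader(...)
        } finally {
          tracer.done(); // always close the span, even if construction throws
        }
      }

      private static void buildReader() {
        // placeholder for the traced work
      }
    }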


Project: http://git-wip-us.apache.org/repos/asf/incubator-blur/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-blur/commit/5e6589fc
Tree: http://git-wip-us.apache.org/repos/asf/incubator-blur/tree/5e6589fc
Diff: http://git-wip-us.apache.org/repos/asf/incubator-blur/diff/5e6589fc

Branch: refs/heads/master
Commit: 5e6589fcbb7111491fefa1added84aa419ef8659
Parents: ba12672
Author: Aaron McCurry <amccurry@gmail.com>
Authored: Tue Apr 7 08:28:15 2015 -0400
Committer: Aaron McCurry <amccurry@gmail.com>
Committed: Tue Apr 7 08:28:15 2015 -0400

----------------------------------------------------------------------
 .../lucene/codec/DiskDocValuesProducer.java     | 62 ++++++++------------
 1 file changed, 26 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-blur/blob/5e6589fc/blur-store/src/main/java/org/apache/blur/lucene/codec/DiskDocValuesProducer.java
----------------------------------------------------------------------
diff --git a/blur-store/src/main/java/org/apache/blur/lucene/codec/DiskDocValuesProducer.java b/blur-store/src/main/java/org/apache/blur/lucene/codec/DiskDocValuesProducer.java
index 6a52fc2..3bc6737 100644
--- a/blur-store/src/main/java/org/apache/blur/lucene/codec/DiskDocValuesProducer.java
+++ b/blur-store/src/main/java/org/apache/blur/lucene/codec/DiskDocValuesProducer.java
@@ -21,6 +21,8 @@ import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.blur.trace.Trace;
+import org.apache.blur.trace.Tracer;
 import org.apache.lucene.codecs.CodecUtil;
 import org.apache.lucene.codecs.DocValuesProducer;
 import org.apache.lucene.index.BinaryDocValues;
@@ -45,11 +47,6 @@ class DiskDocValuesProducer extends DocValuesProducer {
   private final Map<Integer,NumericEntry> ordIndexes;
   private final IndexInput data;
 
-  // memory-resident structures
-  private final Map<Integer,BlockPackedReader> ordinalInstances = new HashMap<Integer,BlockPackedReader>();
-  private final Map<Integer,MonotonicBlockPackedReader> addressInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
-  private final Map<Integer,MonotonicBlockPackedReader> ordIndexInstances = new HashMap<Integer,MonotonicBlockPackedReader>();
-  
   DiskDocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) throws IOException {
     String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
     // read in the entries from the metadata file.
@@ -221,17 +218,14 @@ class DiskDocValuesProducer extends DocValuesProducer {
   private BinaryDocValues getVariableBinary(FieldInfo field, final BinaryEntry bytes) throws IOException {
     final IndexInput data = this.data.clone();
     
+    Tracer trace = Trace.trace("getVariableBinary - MonotonicBlockPackedReader - create");
     final MonotonicBlockPackedReader addresses;
-    synchronized (addressInstances) {
-      MonotonicBlockPackedReader addrInstance = addressInstances.get(field.number);
-      if (addrInstance == null) {
-        data.seek(bytes.addressesOffset);
-        addrInstance = new MonotonicBlockPackedReader(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count, true);
-        addressInstances.put(field.number, addrInstance);
-      }
-      addresses = addrInstance;
+    try {
+      data.seek(bytes.addressesOffset);
+      addresses = new MonotonicBlockPackedReader(data, bytes.packedIntsVersion, bytes.blockSize, bytes.count, true);
+    } finally {
+      trace.done();
     }
-
     return new LongBinaryDocValues() {
       @Override
       public void get(long id, BytesRef result) {
@@ -258,17 +252,15 @@ class DiskDocValuesProducer extends DocValuesProducer {
   public SortedDocValues getSorted(FieldInfo field) throws IOException {
     final int valueCount = (int) binaries.get(field.number).count;
     final BinaryDocValues binary = getBinary(field);
+    Tracer trace = Trace.trace("getSorted - BlockPackedReader - create");
     final BlockPackedReader ordinals;
-    synchronized (ordinalInstances) {
-      BlockPackedReader ordsInstance = ordinalInstances.get(field.number);
-      if (ordsInstance == null) {
-        NumericEntry entry = ords.get(field.number);
-        IndexInput data = this.data.clone();
-        data.seek(entry.offset);
-        ordsInstance = new BlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, entry.count, true);
-        ordinalInstances.put(field.number, ordsInstance);
-      }
-      ordinals = ordsInstance;
+    try{
+      NumericEntry entry = ords.get(field.number);
+      IndexInput data = this.data.clone();
+      data.seek(entry.offset);
+      ordinals = new BlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, entry.count, true);
+    } finally {
+      trace.done();
     }
     return new SortedDocValues() {
 
@@ -295,20 +287,18 @@ class DiskDocValuesProducer extends DocValuesProducer {
     // we keep the byte[]s and list of ords on disk, these could be large
     final LongBinaryDocValues binary = (LongBinaryDocValues) getBinary(field);
     final LongNumericDocValues ordinals = getNumeric(ords.get(field.number));
-    // but the addresses to the ord stream are in RAM
+
+    Tracer trace = Trace.trace("getSortedSet - MonotonicBlockPackedReader - create");
     final MonotonicBlockPackedReader ordIndex;
-    synchronized (ordIndexInstances) {
-      MonotonicBlockPackedReader ordIndexInstance = ordIndexInstances.get(field.number);
-      if (ordIndexInstance == null) {
-        NumericEntry entry = ordIndexes.get(field.number);
-        IndexInput data = this.data.clone();
-        data.seek(entry.offset);
-        ordIndexInstance = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, entry.count, true);
-        ordIndexInstances.put(field.number, ordIndexInstance);
-      }
-      ordIndex = ordIndexInstance;
+    try{
+      NumericEntry entry = ordIndexes.get(field.number);
+      IndexInput data = this.data.clone();
+      data.seek(entry.offset);
+      ordIndex = new MonotonicBlockPackedReader(data, entry.packedIntsVersion, entry.blockSize, entry.count, true);
+    } finally {
+      trace.done();
     }
-    
+
     return new SortedSetDocValues() {
       long offset;
       long endOffset;

