hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From prasan...@apache.org
Subject svn commit: r1661242 - in /hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode: ./ orc/stream/ orc/stream/readers/
Date Fri, 20 Feb 2015 23:45:00 GMT
Author: prasanthj
Date: Fri Feb 20 23:44:59 2015
New Revision: 1661242

URL: http://svn.apache.org/r1661242
Log:
HIVE-9419: LLAP: ORC decoding of row-groups (final commit)

Modified:
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/StreamUtils.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BinaryStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BooleanStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ByteStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/CharacterStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DateStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DecimalStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DoubleStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/FloatStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/IntStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/LongStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ShortStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/StringStreamReader.java
    hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/TimestampStreamReader.java

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/OrcColumnVectorProducer.java
Fri Feb 20 23:44:59 2015
@@ -73,37 +73,43 @@ public class OrcColumnVectorProducer ext
   protected void decodeBatch(EncodedColumnBatch<OrcBatchKey> batch,
       Consumer<ColumnVectorBatch> downstreamConsumer) {
     String fileName = batch.batchKey.file;
+
     // OrcEncodedDataProducer should have just loaded cache entries from this file.
     // The default LRU algorithm shouldn't have dropped the entries. To make it
-    // safe, untie the code from EDP into separate class and make use of loading cache.
+    // safe, untie the code from EDP into separate class and make use of loading cache. The current
+    // assumption is that entries for the current file exist in metadata cache.
     try {
       OrcFileMetadata fileMetadata = metadataCache.getFileMetadata(fileName);
       OrcBatchKey stripeKey = batch.batchKey.clone();
-      // we are interested only in the stripe number. To make sure we get the correct stripe
+
+      // To get stripe metadata we only need to know the stripe number. Oddly, stripe metadata
+      // accepts BatchKey as key. We need to keep the row group index in batch key the same to
+      // retrieve the stripe metadata properly. To make sure we get the correct stripe
       // metadata, set row group index to 0. That's how it is cached. See OrcEncodedDataProducer
       stripeKey.rgIx = 0;
       OrcStripeMetadata stripeMetadata = metadataCache.getStripeMetadata(stripeKey);
 
-      // Get non null row count from root column
+      // Get non null row count from root column, to get max vector batches
       int rgIdx = batch.batchKey.rgIx;
       OrcProto.RowIndexEntry rowIndex = stripeMetadata.getRowIndexes()[0].getEntry(rgIdx);
       long nonNullRowCount = getRowCount(rowIndex);
       int maxBatchesRG = (int) ((nonNullRowCount / VectorizedRowBatch.DEFAULT_SIZE) + 1);
       int batchSize = VectorizedRowBatch.DEFAULT_SIZE;
       int numCols = batch.columnIxs.length;
-      RecordReaderImpl.TreeReader[] columnStreams = createTreeReaders(numCols, batch, fileMetadata,
+      RecordReaderImpl.TreeReader[] columnReaders = createTreeReaders(numCols, batch, fileMetadata,
           stripeMetadata);
 
       for (int i = 0; i < maxBatchesRG; i++) {
         ColumnVectorBatch cvb = new ColumnVectorBatch(batch.columnIxs.length);
 
+        // for last batch in row group, adjust the batch size
         if (i == maxBatchesRG - 1) {
           batchSize = (int) (nonNullRowCount % VectorizedRowBatch.DEFAULT_SIZE);
           cvb.size = batchSize;
         }
 
         for (int idx = 0; idx < batch.columnIxs.length; idx++) {
-          cvb.cols[idx] = (ColumnVector) columnStreams[idx].nextVector(null, batchSize);
+          cvb.cols[idx] = (ColumnVector) columnReaders[idx].nextVector(null, batchSize);
         }
 
         // we are done reading a batch, send it to consumer for processing
@@ -122,29 +128,28 @@ public class OrcColumnVectorProducer ext
     RecordReaderImpl.TreeReader[] treeReaders = new RecordReaderImpl.TreeReader[numCols];
 
     for (int i = 0; i < numCols; i++) {
-      int colIx = batch.columnIxs[i];
-      int rgIdx = batch.batchKey.rgIx;
-
+      int columnIndex = batch.columnIxs[i];
+      int rowGroupIndex = batch.batchKey.rgIx;
       EncodedColumnBatch.StreamBuffer[] streamBuffers = batch.columnData[i];
-      OrcProto.Type colType = fileMetadata.getTypes().get(colIx);
-      // TODO: EncodedColumnBatch is already decompressed, we don't really need to pass codec.
-      // But we need to know if the original data is compressed or not. This is used to skip positions
-      // in row index. If the file is originally compressed, then 1st position (compressed offset)
-      // in row index should be skipped to get uncompressed offset, else 1st position should not
-      // be skipped.
+      OrcProto.Type columnType = fileMetadata.getTypes().get(columnIndex);
+
+      // EncodedColumnBatch is already decompressed, we don't really need to pass codec.
+      // But we need to know if the original data is compressed or not. This is used to skip
+      // positions in row index properly. If the file is originally compressed,
+      // then 1st position (compressed offset) in row index should be skipped to get
+      // uncompressed offset, else 1st position should not be skipped.
       CompressionCodec codec = fileMetadata.getCompressionCodec();
       int bufferSize = fileMetadata.getCompressionBufferSize();
-      OrcProto.ColumnEncoding columnEncoding = stripeMetadata.getEncodings().get(colIx);
-      OrcProto.RowIndex rowIndex = stripeMetadata.getRowIndexes()[colIx];
-      OrcProto.RowIndexEntry rowIndexEntry = rowIndex.getEntry(rgIdx);
+      OrcProto.ColumnEncoding columnEncoding = stripeMetadata.getEncodings().get(columnIndex);
+      OrcProto.RowIndex rowIndex = stripeMetadata.getRowIndexes()[columnIndex];
+      OrcProto.RowIndexEntry rowIndexEntry = rowIndex.getEntry(rowGroupIndex);
 
+      // stream buffers are arranged in enum order of stream kind
       EncodedColumnBatch.StreamBuffer present = null;
       EncodedColumnBatch.StreamBuffer data = null;
       EncodedColumnBatch.StreamBuffer dictionary = null;
       EncodedColumnBatch.StreamBuffer lengths = null;
       EncodedColumnBatch.StreamBuffer secondary = null;
-
-
       for (EncodedColumnBatch.StreamBuffer streamBuffer : streamBuffers) {
         switch(streamBuffer.streamKind) {
           case 0:
@@ -172,11 +177,11 @@ public class OrcColumnVectorProducer ext
         }
       }
 
-      switch (colType.getKind()) {
+      switch (columnType.getKind()) {
         case BINARY:
           treeReaders[i] = BinaryStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setLengthStream(lengths)
@@ -189,7 +194,7 @@ public class OrcColumnVectorProducer ext
         case BOOLEAN:
           treeReaders[i] = BooleanStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -200,7 +205,7 @@ public class OrcColumnVectorProducer ext
         case BYTE:
           treeReaders[i] = ByteStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -211,7 +216,7 @@ public class OrcColumnVectorProducer ext
         case SHORT:
           treeReaders[i] = ShortStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -223,7 +228,7 @@ public class OrcColumnVectorProducer ext
         case INT:
           treeReaders[i] = IntStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -235,7 +240,7 @@ public class OrcColumnVectorProducer ext
         case LONG:
           treeReaders[i] = LongStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -248,7 +253,7 @@ public class OrcColumnVectorProducer ext
         case FLOAT:
           treeReaders[i] = FloatStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -259,7 +264,7 @@ public class OrcColumnVectorProducer ext
         case DOUBLE:
           treeReaders[i] = DoubleStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -271,9 +276,9 @@ public class OrcColumnVectorProducer ext
         case VARCHAR:
           treeReaders[i] = CharacterStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
-              .setMaxLength(colType.getMaximumLength())
-              .setCharacterType(colType)
+              .setColumnIndex(columnIndex)
+              .setMaxLength(columnType.getMaximumLength())
+              .setCharacterType(columnType)
               .setPresentStream(present)
               .setDataStream(data)
               .setLengthStream(lengths)
@@ -287,7 +292,7 @@ public class OrcColumnVectorProducer ext
         case STRING:
           treeReaders[i] = StringStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setLengthStream(lengths)
@@ -301,9 +306,9 @@ public class OrcColumnVectorProducer ext
         case DECIMAL:
           treeReaders[i] = DecimalStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
-              .setPrecision(colType.getPrecision())
-              .setScale(colType.getScale())
+              .setColumnIndex(columnIndex)
+              .setPrecision(columnType.getPrecision())
+              .setScale(columnType.getScale())
               .setPresentStream(present)
               .setValueStream(data)
               .setScaleStream(secondary)
@@ -316,7 +321,7 @@ public class OrcColumnVectorProducer ext
         case TIMESTAMP:
           treeReaders[i] = TimestampStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setSecondsStream(data)
               .setNanosStream(secondary)
@@ -330,7 +335,7 @@ public class OrcColumnVectorProducer ext
         case DATE:
           treeReaders[i] = DateStreamReader.builder()
               .setFileName(file)
-              .setColumnIndex(colIx)
+              .setColumnIndex(columnIndex)
               .setPresentStream(present)
               .setDataStream(data)
               .setCompressionCodec(codec)
@@ -340,7 +345,7 @@ public class OrcColumnVectorProducer ext
               .build();
           break;
         default:
-          throw new UnsupportedOperationException("Data type not supported yet! " + colType);
+          throw new UnsupportedOperationException("Data type not supported yet! " + columnType);
       }
     }
     return treeReaders;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/StreamUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/StreamUtils.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/StreamUtils.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/StreamUtils.java
Fri Feb 20 23:44:59 2015
@@ -32,10 +32,21 @@ import com.google.common.collect.Lists;
 import com.google.common.primitives.Longs;
 
 /**
- *
+ * Stream utility.
  */
 public class StreamUtils {
 
+  /**
+   * Create InStream from stream buffer.
+   *
+   * @param streamName - stream name
+   * @param fileName - file name
+   * @param codec - compression codec
+   * @param bufferSize - compression buffer size
+   * @param streamBuffer - stream buffer
+   * @return - InStream
+   * @throws IOException
+   */
   public static InStream createInStream(String streamName, String fileName, CompressionCodec codec,
       int bufferSize, EncodedColumnBatch.StreamBuffer streamBuffer) throws IOException {
     if (streamBuffer == null) {
@@ -49,6 +60,7 @@ public class StreamUtils {
     for (int i = 0; i < numBuffers; i++) {
       ByteBuffer data = streamBuffer.cacheBuffers.get(i).byteBuffer.duplicate();
       input.add(data);
+      // offsets start where the previous stream buffer left off
       offsetsList.add(totalLength);
       totalLength += data.remaining();
     }
@@ -57,6 +69,12 @@ public class StreamUtils {
     return InStream.create(fileName, streamName, buffers, offsets, totalLength, codec, bufferSize);
   }
 
+  /**
+   * Converts row index entry to position provider.
+   *
+   * @param rowIndex - row index entry
+   * @return - position provider
+   */
   public static PositionProvider getPositionProvider(OrcProto.RowIndexEntry rowIndex) {
     PositionProvider positionProvider = new RecordReaderImpl.PositionProviderImpl(rowIndex);
     return positionProvider;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BinaryStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BinaryStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BinaryStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BinaryStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for binary column type.
  */
 public class BinaryStreamReader extends RecordReaderImpl.BinaryTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BooleanStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BooleanStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BooleanStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/BooleanStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for boolean column type.
  */
 public class BooleanStreamReader extends RecordReaderImpl.BooleanTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ByteStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ByteStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ByteStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ByteStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for byte column type.
  */
 public class ByteStreamReader extends RecordReaderImpl.ByteTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/CharacterStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/CharacterStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/CharacterStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/CharacterStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for char and varchar column types.
  */
 public class CharacterStreamReader extends RecordReaderImpl.StringTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DateStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DateStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DateStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DateStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for date column type.
  */
 public class DateStreamReader extends RecordReaderImpl.DateTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DecimalStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DecimalStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DecimalStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DecimalStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for decimal column type.
  */
 public class DecimalStreamReader extends RecordReaderImpl.DecimalTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DoubleStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DoubleStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DoubleStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/DoubleStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for double column type.
  */
 public class DoubleStreamReader extends RecordReaderImpl.DoubleTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/FloatStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/FloatStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/FloatStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/FloatStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for float column type.
  */
 public class FloatStreamReader extends RecordReaderImpl.FloatTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/IntStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/IntStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/IntStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/IntStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for integer column type.
  */
 public class IntStreamReader extends RecordReaderImpl.IntTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/LongStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/LongStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/LongStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/LongStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for long column type.
  */
 public class LongStreamReader extends RecordReaderImpl.LongTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ShortStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ShortStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ShortStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/ShortStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for short column type.
  */
 public class ShortStreamReader extends RecordReaderImpl.ShortTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/StringStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/StringStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/StringStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/StringStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for string column type.
  */
 public class StringStreamReader extends RecordReaderImpl.StringTreeReader {
   private boolean isFileCompressed;

Modified: hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/TimestampStreamReader.java
URL: http://svn.apache.org/viewvc/hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/TimestampStreamReader.java?rev=1661242&r1=1661241&r2=1661242&view=diff
==============================================================================
--- hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/TimestampStreamReader.java
(original)
+++ hive/branches/llap/llap-server/src/java/org/apache/hadoop/hive/llap/io/decode/orc/stream/readers/TimestampStreamReader.java
Fri Feb 20 23:44:59 2015
@@ -28,7 +28,7 @@ import org.apache.hadoop.hive.ql.io.orc.
 import org.apache.hadoop.hive.ql.io.orc.RecordReaderImpl;
 
 /**
- *
+ * Stream reader for timestamp column type.
  */
 public class TimestampStreamReader extends RecordReaderImpl.TimestampTreeReader {
   private boolean isFileCompressed;



Mime
View raw message