phoenix-commits mailing list archives

From jeffr...@apache.org
Subject [42/50] [abbrv] merge master to 4.0 branch
Date Wed, 05 Mar 2014 22:55:28 GMT
http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b3a330ca/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
index a153e73,0f91138..44bc913
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ScanRegionObserver.java
@@@ -24,11 -22,13 +22,14 @@@ import java.io.ByteArrayOutputStream
  import java.io.DataInputStream;
  import java.io.DataOutputStream;
  import java.io.IOException;
+ import java.util.ArrayList;
  import java.util.List;
  
+ import org.apache.hadoop.hbase.HConstants;
 +import org.apache.hadoop.hbase.Cell;
  import org.apache.hadoop.hbase.HRegionInfo;
  import org.apache.hadoop.hbase.KeyValue;
+ import org.apache.hadoop.hbase.KeyValue.Type;
  import org.apache.hadoop.hbase.client.Scan;
  import org.apache.hadoop.hbase.coprocessor.ObserverContext;
  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
@@@ -275,28 -345,71 +331,76 @@@ public class ScanRegionObserver extend
              }
  
              @Override
 -            public boolean nextRaw(List<KeyValue> result, String metric) throws IOException {
 +            public boolean nextRaw(List<Cell> result) throws IOException {
                  try {
-                     return s.nextRaw(result);
 -                    boolean next = s.nextRaw(result, metric);
++                    boolean next = s.nextRaw(result);
+                     if(result.size() == 0) {
+                         return next;
+                     } else if((arrayFuncRefs != null && arrayFuncRefs.length == 0) || arrayKVRefs.size() == 0) {
+                         return next;
+                     }
+                     replaceArrayIndexElement(arrayKVRefs, arrayFuncRefs, result);
+                     return next;
                  } catch (Throwable t) {
                    ServerUtil.throwIOException(c.getEnvironment().getRegion().getRegionNameAsString(), t);
                      return false; // impossible
                  }
              }
  
 -            
 -
              @Override
 -            public boolean nextRaw(List<KeyValue> result, int limit, String metric) throws IOException {
 +            public boolean nextRaw(List<Cell> result, int limit) throws IOException {
                  try {
-                     return s.nextRaw(result, limit);
 -                    boolean next = s.nextRaw(result, limit, metric);
++                    boolean next = s.nextRaw(result, limit);
+                     if (result.size() == 0) {
+                         return next;
+                     } else if ((arrayFuncRefs != null && arrayFuncRefs.length == 0) || arrayKVRefs.size() == 0) {
+                         return next; 
+                     }
+                     // There is a scanattribute set to retrieve the specific array element
+                     replaceArrayIndexElement(arrayKVRefs, arrayFuncRefs, result);
+                     return next;
                  } catch (Throwable t) {
                    ServerUtil.throwIOException(c.getEnvironment().getRegion().getRegionNameAsString(), t);
                      return false; // impossible
                  }
              }
+ 
+             private void replaceArrayIndexElement(final List<KeyValueColumnExpression> arrayKVRefs,
 -                    final Expression[] arrayFuncRefs, List<KeyValue> result) {
++                    final Expression[] arrayFuncRefs, List<Cell> result) {
+                 MultiKeyValueTuple tuple = new MultiKeyValueTuple(result);
+                 // The size of both the arrays would be same?
+                 // Using KeyValueSchema to set and retrieve the value
+                 // collect the first kv to get the row
 -                KeyValue rowKv = result.get(0);
++                Cell rowKv = result.get(0);
+                 for (int i = 0; i < arrayKVRefs.size(); i++) {
+                     KeyValueColumnExpression kvExp = arrayKVRefs.get(i);
+                     if (kvExp.evaluate(tuple, ptr)) {
+                         for (int idx = tuple.size() - 1; idx >= 0; idx--) {
 -                            KeyValue kv = tuple.getValue(idx);
 -                            if (Bytes.equals(kvExp.getColumnFamily(), kv.getFamily())
 -                                    && Bytes.equals(kvExp.getColumnName(), kv.getQualifier())) {
++                        	Cell kv = tuple.getValue(idx);
++                            if (Bytes.equals(kvExp.getColumnFamily(), 0, kvExp.getColumnFamily().length,
++                            		kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength())
++                                && Bytes.equals(kvExp.getColumnName(), 0, kvExp.getColumnName().length,
++                                		kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength())) {
+                                 // remove the kv that has the full array values.
+                                 result.remove(idx);
+                                 break;
+                             }
+                         }
+                     }
+                 }
+                 byte[] value = kvSchema.toBytes(tuple, arrayFuncRefs,
+                         kvSchemaBitSet, ptr);
+                 // Add a dummy kv with the exact value of the array index
 -                result.add(new KeyValue(rowKv.getBuffer(), rowKv.getRowOffset(), rowKv.getRowLength(),
++                result.add(new KeyValue(rowKv.getRowArray(), rowKv.getRowOffset(), rowKv.getRowLength(),
+                         QueryConstants.ARRAY_VALUE_COLUMN_FAMILY, 0, QueryConstants.ARRAY_VALUE_COLUMN_FAMILY.length,
+                         QueryConstants.ARRAY_VALUE_COLUMN_QUALIFIER, 0,
+                         QueryConstants.ARRAY_VALUE_COLUMN_QUALIFIER.length, HConstants.LATEST_TIMESTAMP,
 -                        Type.codeToType(rowKv.getType()), value, 0, value.length));
++                        Type.codeToType(rowKv.getTypeByte()), value, 0, value.length));
++            }
 +            
 +            @Override
 +            public long getMaxResultSize() {
 +                return s.getMaxResultSize();
              }
          };
      }
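
The ScanRegionObserver hunk above is largely about keeping the 4.0 branch on the HBase 0.96 Cell API: the concrete KeyValue accessors of 0.94 (getBuffer(), getFamily(), getQualifier()) give way to Cell and its array/offset/length accessors, and nextRaw() loses the String metric parameter. The sketch below distills the comparison idiom used in replaceArrayIndexElement(); CellMatch and matches() are illustrative names, not part of the patch.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.util.Bytes;

// Hypothetical helper showing the 0.96-style comparison: instead of copying
// bytes out via kv.getFamily()/kv.getQualifier(), compare in place against
// the backing arrays using explicit offsets and lengths.
final class CellMatch {
    static boolean matches(Cell kv, byte[] family, byte[] qualifier) {
        return Bytes.equals(family, 0, family.length,
                    kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength())
                && Bytes.equals(qualifier, 0, qualifier.length,
                    kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength());
    }
}

Skipping the intermediate byte[] copies is worthwhile here because the check runs once per matched cell on the region-server scan path.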

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b3a330ca/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
index e82d4ee,b8eb7d4..d715591
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/SequenceRegionObserver.java
@@@ -21,9 -38,9 +39,10 @@@ import org.apache.hadoop.hbase.coproces
  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
  import org.apache.hadoop.hbase.io.TimeRange;
  import org.apache.hadoop.hbase.regionserver.HRegion;
 +import org.apache.hadoop.hbase.regionserver.HRegion.RowLock;
  import org.apache.hadoop.hbase.util.Bytes;
  import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+ import org.apache.hadoop.hbase.util.Pair;
  import org.apache.phoenix.exception.SQLExceptionCode;
  import org.apache.phoenix.jdbc.PhoenixDatabaseMetaData;
  import org.apache.phoenix.query.QueryConstants;
@@@ -106,12 -111,14 +126,16 @@@ public class SequenceRegionObserver ext
                      maxTimestamp = EnvironmentEdgeManager.currentTimeMillis();
                      tr = new TimeRange(tr.getMin(), maxTimestamp);
                  }
+                 boolean validateOnly = true;
                  Get get = new Get(row);
                  get.setTimeRange(tr.getMin(), tr.getMax());
 -                for (Map.Entry<byte[],NavigableMap<byte[], Long>> entry : increment.getFamilyMap().entrySet()) {
 +                for (Map.Entry<byte[], List<Cell>> entry : increment.getFamilyCellMap().entrySet()) {
                      byte[] cf = entry.getKey();
 -                    for (Map.Entry<byte[],Long> kvEntry : entry.getValue().entrySet()) {
 -                        get.addColumn(cf, kvEntry.getKey());
 -                        validateOnly &= (Sequence.Action.VALIDATE.ordinal() == kvEntry.getValue().intValue());
 +                    for (Cell cq : entry.getValue()) {
++                    	long value = PDataType.LONG.getCodec().decodeLong(cq.getValueArray(), cq.getValueOffset(),
++                    			SortOrder.getDefault());
 +                        get.addColumn(cf, CellUtil.cloneQualifier(cq));
++                        validateOnly &= (Sequence.Action.VALIDATE.ordinal() == value);
                      }
                  }
                  Result result = region.get(get);
@@@ -121,9 -131,9 +148,11 @@@
                  KeyValue currentValueKV = Sequence.getCurrentValueKV(result);
                  KeyValue incrementByKV = Sequence.getIncrementByKV(result);
                  KeyValue cacheSizeKV = Sequence.getCacheSizeKV(result);
-                 long value = PDataType.LONG.getCodec().decodeLong(currentValueKV.getValueArray(), currentValueKV.getValueOffset(), null);
-                 long incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getValueArray(), incrementByKV.getValueOffset(), null);
-                 int cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getValueArray(), cacheSizeKV.getValueOffset(), null);
 -                long value = PDataType.LONG.getCodec().decodeLong(currentValueKV.getBuffer(), currentValueKV.getValueOffset(), SortOrder.getDefault());
 -                long incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getBuffer(), incrementByKV.getValueOffset(), SortOrder.getDefault());
 -                int cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getBuffer(), cacheSizeKV.getValueOffset(), SortOrder.getDefault());
++
++                long value = PDataType.LONG.getCodec().decodeLong(currentValueKV.getValueArray(), currentValueKV.getValueOffset(), SortOrder.getDefault());
++                long incrementBy = PDataType.LONG.getCodec().decodeLong(incrementByKV.getValueArray(), incrementByKV.getValueOffset(), SortOrder.getDefault());
++                int cacheSize = PDataType.INTEGER.getCodec().decodeInt(cacheSizeKV.getValueArray(), cacheSizeKV.getValueOffset(), SortOrder.getDefault());
++
                  value += incrementBy * cacheSize;
                  byte[] valueBuffer = new byte[PDataType.LONG.getByteSize()];
                  PDataType.LONG.getCodec().encodeLong(value, valueBuffer, 0);
@@@ -227,20 -234,17 +256,20 @@@
                  switch (op) {
                  case RETURN_SEQUENCE:
                      KeyValue currentValueKV = result.raw()[0];
-                     long expectedValue = PDataType.LONG.getCodec().decodeLong(append.getAttribute(CURRENT_VALUE_ATTRIB), 0, null);
+                     long expectedValue = PDataType.LONG.getCodec().decodeLong(append.getAttribute(CURRENT_VALUE_ATTRIB), 0, SortOrder.getDefault());
 -                    long value = PDataType.LONG.getCodec().decodeLong(currentValueKV.getBuffer(), currentValueKV.getValueOffset(), SortOrder.getDefault());
 +                    long value = PDataType.LONG.getCodec().decodeLong(currentValueKV.getValueArray(),
-                       currentValueKV.getValueOffset(), null);
++                      currentValueKV.getValueOffset(), SortOrder.getDefault());
                      // Timestamp should match exactly, or we may have the wrong sequence
                    if (expectedValue != value || currentValueKV.getTimestamp() != clientTimestamp) {
 -                        return new Result(Collections.singletonList(KeyValueUtil.newKeyValue(row, PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES, QueryConstants.EMPTY_COLUMN_BYTES, currentValueKV.getTimestamp(), ByteUtil.EMPTY_BYTE_ARRAY)));
 +                        return Result.create(Collections.singletonList(
 +                          (Cell)KeyValueUtil.newKeyValue(row, PhoenixDatabaseMetaData.SEQUENCE_FAMILY_BYTES,
 +                            QueryConstants.EMPTY_COLUMN_BYTES, currentValueKV.getTimestamp(), ByteUtil.EMPTY_BYTE_ARRAY)));
                      }
                      m = new Put(row, currentValueKV.getTimestamp());
 -                    m.getFamilyMap().putAll(append.getFamilyMap());
 +                    m.getFamilyCellMap().putAll(append.getFamilyCellMap());
                      break;
                  case DROP_SEQUENCE:
 -                    m = new Delete(row, clientTimestamp, null);
 +                    m = new Delete(row, clientTimestamp);
                      break;
                  case CREATE_SEQUENCE:
                      m = new Put(row, clientTimestamp);
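
The sequence hunks all converge on one decoding idiom: PDataType codecs now take an explicit SortOrder where the pre-merge 4.0 code passed null, and value bytes are read through the Cell-style getValueArray()/getValueOffset() accessors instead of getBuffer(). A minimal sketch of that idiom, assuming an ascending-order LONG cell; SequenceDecode and someKV are illustrative names only.

import org.apache.hadoop.hbase.KeyValue;
import org.apache.phoenix.schema.PDataType;
import org.apache.phoenix.schema.SortOrder;

// Sketch: decode a Phoenix-encoded LONG out of a KeyValue's value bytes.
final class SequenceDecode {
    static long decodeSequenceValue(KeyValue someKV) {
        return PDataType.LONG.getCodec().decodeLong(
                someKV.getValueArray(),   // backing array (0.96 Cell accessor)
                someKV.getValueOffset(),  // where the value starts in that array
                SortOrder.getDefault());  // explicit ascending order, not null
    }
}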

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b3a330ca/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ServerCachingEndpointImpl.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ServerCachingEndpointImpl.java
index 9331547,6dea838..2f31b08
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ServerCachingEndpointImpl.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ServerCachingEndpointImpl.java
@@@ -19,29 -17,18 +17,31 @@@
   */
  package org.apache.phoenix.coprocessor;
  
 +import java.io.IOException;
  import java.sql.SQLException;
  
 -import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
 +import org.apache.hadoop.hbase.Coprocessor;
 +import org.apache.hadoop.hbase.CoprocessorEnvironment;
 +import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 +import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
  import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
- import org.apache.hadoop.hbase.index.util.ImmutableBytesPtr;
  import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+ 
  import org.apache.phoenix.cache.GlobalCache;
  import org.apache.phoenix.cache.TenantCache;
+ import org.apache.phoenix.hbase.index.util.ImmutableBytesPtr;
+ 
 +import org.apache.phoenix.coprocessor.ServerCachingProtocol.ServerCacheFactory;
 +import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.AddServerCacheRequest;
 +import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.AddServerCacheResponse;
 +import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.RemoveServerCacheRequest;
 +import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.RemoveServerCacheResponse;
 +import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.ServerCachingService;
 +import org.apache.phoenix.protobuf.ProtobufUtil;
  
 -
 +import com.google.protobuf.RpcCallback;
 +import com.google.protobuf.RpcController;
 +import com.google.protobuf.Service;
  
  /**
   * 
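
The import churn in ServerCachingEndpointImpl tracks HBase's replacement of the 0.94 BaseEndpointCoprocessor dynamic-RPC mechanism with protobuf-defined endpoints: in 0.96 an endpoint extends its generated Service class and hands itself to the framework via CoprocessorService. Below is a skeleton of that pattern built from the imports shown above; ExampleCachingEndpoint, the method bodies, and the setReturn() builder call are assumptions for illustration, not the actual Phoenix implementation.

import java.io.IOException;

import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.AddServerCacheRequest;
import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.AddServerCacheResponse;
import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.RemoveServerCacheRequest;
import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.RemoveServerCacheResponse;
import org.apache.phoenix.coprocessor.generated.ServerCachingProtos.ServerCachingService;

import com.google.protobuf.RpcCallback;
import com.google.protobuf.RpcController;
import com.google.protobuf.Service;

// Skeleton of a 0.96-style protobuf endpoint: extend the generated Service,
// expose it via getService(), and capture the region environment in start().
public class ExampleCachingEndpoint extends ServerCachingService
        implements CoprocessorService, Coprocessor {

    private RegionCoprocessorEnvironment env;

    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
        if (env instanceof RegionCoprocessorEnvironment) {
            this.env = (RegionCoprocessorEnvironment) env;
        } else {
            throw new CoprocessorException("Must be loaded on a table region!");
        }
    }

    @Override
    public void stop(CoprocessorEnvironment env) throws IOException {
        // nothing to clean up in this sketch
    }

    @Override
    public Service getService() {
        return this; // the framework routes ServerCachingService RPCs here
    }

    @Override
    public void addServerCache(RpcController controller, AddServerCacheRequest request,
            RpcCallback<AddServerCacheResponse> done) {
        // ... deserialize the cache payload into the tenant cache, then respond;
        // setReturn() mirrors the response message's assumed boolean field.
        done.run(AddServerCacheResponse.newBuilder().setReturn(true).build());
    }

    @Override
    public void removeServerCache(RpcController controller, RemoveServerCacheRequest request,
            RpcCallback<RemoveServerCacheResponse> done) {
        done.run(RemoveServerCacheResponse.newBuilder().setReturn(true).build());
    }
}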

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b3a330ca/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/ServerCachingProtocol.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-phoenix/blob/b3a330ca/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
----------------------------------------------------------------------
diff --cc phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
index 42de847,0a840d4..0e5df46
--- a/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/coprocessor/UngroupedAggregateRegionObserver.java
@@@ -56,8 -55,8 +54,9 @@@ import org.slf4j.LoggerFactory
  
  import com.google.common.collect.Lists;
  import com.google.common.collect.Sets;
+ 
  import org.apache.phoenix.client.KeyValueBuilder;
 +import org.apache.phoenix.coprocessor.generated.PTableProtos;
  import org.apache.phoenix.exception.ValueTypeIncompatibleException;
  import org.apache.phoenix.expression.Expression;
  import org.apache.phoenix.expression.ExpressionType;

