phoenix-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravimag...@apache.org
Subject [1/2] git commit: Phoenix 1040 : Changes to PhoenixRecordReader to fix the scan
Date Sun, 21 Sep 2014 17:28:39 GMT
Repository: phoenix
Updated Branches:
  refs/heads/4.0 987acf6d1 -> 2ccb62d18


Phoenix 1040 : Changes to PhoenixRecordReader to fix the scan


Project: http://git-wip-us.apache.org/repos/asf/phoenix/repo
Commit: http://git-wip-us.apache.org/repos/asf/phoenix/commit/27b3865c
Tree: http://git-wip-us.apache.org/repos/asf/phoenix/tree/27b3865c
Diff: http://git-wip-us.apache.org/repos/asf/phoenix/diff/27b3865c

Branch: refs/heads/4.0
Commit: 27b3865c27b928d229bfb92d7f2b5eece0188321
Parents: 987acf6
Author: mravi <maghamravikiran@gmail.com>
Authored: Sun Sep 21 10:27:11 2014 -0700
Committer: mravi <maghamravikiran@gmail.com>
Committed: Sun Sep 21 10:27:11 2014 -0700

----------------------------------------------------------------------
 .../phoenix/pig/PhoenixHBaseLoaderIT.java       | 57 ++++++++++++++++++++
 .../phoenix/pig/hadoop/PhoenixInputFormat.java  | 19 ++-----
 .../phoenix/pig/hadoop/PhoenixRecordReader.java |  7 ++-
 3 files changed, 65 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/phoenix/blob/27b3865c/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
index 8daea9c..d82e6b0 100644
--- a/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
+++ b/phoenix-pig/src/it/java/org/apache/phoenix/pig/PhoenixHBaseLoaderIT.java
@@ -307,6 +307,63 @@ public class PhoenixHBaseLoaderIT {
     }
     
     /**
+     * 
+     * @throws Exception
+     */
+    @Test
+    public void testForNonPKSQLQuery() throws Exception {
+        
+         //create the table
+         String ddl = "CREATE TABLE  " + TABLE_FULL_NAME 
+                + " ( ID VARCHAR PRIMARY KEY, FOO VARCHAR, BAR INTEGER, BAZ UNSIGNED_INT)";
+                
+        conn.createStatement().execute(ddl);
+        
+        //upsert data.
+        final String dml = "UPSERT INTO " + TABLE_FULL_NAME + " VALUES(?,?,?,?) ";
+        PreparedStatement stmt = conn.prepareStatement(dml);
+        stmt.setString(1, "a");
+        stmt.setString(2, "a");
+        stmt.setInt(3,-1);
+        stmt.setInt(4,1);
+        stmt.execute();
+       
+        stmt.setString(1, "b");
+        stmt.setString(2, "b");
+        stmt.setInt(3,-2);
+        stmt.setInt(4,2);
+        stmt.execute();
+        
+        conn.commit();
+        
+        //sql query
+        final String sqlQuery = String.format(" SELECT FOO, BAZ FROM %s WHERE BAR = -1 ", TABLE_FULL_NAME);
+      
+        pigServer.registerQuery(String.format(
+                "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery,
+                zkQuorum));
+        
+        final Iterator<Tuple> iterator = pigServer.openIterator("A");
+        int recordsRead = 0;
+        while (iterator.hasNext()) {
+            final Tuple tuple = iterator.next();
+            assertEquals("a", tuple.get(0));
+            assertEquals(1, tuple.get(1));
+            recordsRead++;
+        }
+        assertEquals(1, recordsRead);
+        
+        //test the schema. Test for PHOENIX-1123
+        Schema schema = pigServer.dumpSchema("A");
+        List<FieldSchema> fields = schema.getFields();
+        assertEquals(2, fields.size());
+        assertTrue(fields.get(0).alias.equalsIgnoreCase("FOO"));
+        assertTrue(fields.get(0).type == DataType.CHARARRAY);
+        assertTrue(fields.get(1).alias.equalsIgnoreCase("BAZ"));
+        assertTrue(fields.get(1).type == DataType.INTEGER);
+    }
+    
+    /**
      * @throws Exception
      */
     @Test

http://git-wip-us.apache.org/repos/asf/phoenix/blob/27b3865c/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
index ebb9023..6899099 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixInputFormat.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.phoenix.compile.QueryPlan;
 import org.apache.phoenix.compile.StatementContext;
-import org.apache.phoenix.iterate.ResultIterator;
 import org.apache.phoenix.jdbc.PhoenixStatement;
 import org.apache.phoenix.pig.PhoenixPigConfiguration;
 import org.apache.phoenix.query.KeyRange;
@@ -46,7 +45,6 @@ import org.apache.phoenix.schema.TableRef;
 import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.base.Preconditions;
-import com.google.common.base.Throwables;
 import com.google.common.collect.Lists;
 
 /**
@@ -82,18 +80,10 @@ public final class PhoenixInputFormat extends InputFormat<NullWritable, PhoenixR
 
     @Override
     public List<InputSplit> getSplits(JobContext context) throws IOException, InterruptedException {        
-        List<InputSplit> splits = null;
-        try{
-            setConf(context.getConfiguration());
-            final QueryPlan queryPlan = getQueryPlan(context);
-            @SuppressWarnings("unused")
-            final ResultIterator iterator = queryPlan.iterator();
-            final List<KeyRange> allSplits = queryPlan.getSplits();
-            splits = generateSplits(queryPlan,allSplits);
-        } catch(SQLException sqlE) {
-            LOG.error(String.format(" Error [%s] in getSplits of PhoenixInputFormat ", sqlE.getMessage()));
-            Throwables.propagate(sqlE);
-        }
+        setConf(context.getConfiguration());
+        final QueryPlan queryPlan = getQueryPlan(context);
+        final List<KeyRange> allSplits = queryPlan.getSplits();
+        final List<InputSplit> splits = generateSplits(queryPlan,allSplits);
         return splits;
     }
 
@@ -157,6 +147,7 @@ public final class PhoenixInputFormat extends InputFormat<NullWritable, PhoenixR
                 final Statement statement = connection.createStatement();
                 final PhoenixStatement pstmt = statement.unwrap(PhoenixStatement.class);
                 this.queryPlan = pstmt.compileQuery(selectStatement);
+                this.queryPlan.iterator();
             } catch(Exception exception) {
                 LOG.error(String.format("Failed to get the query plan with error [%s]",exception.getMessage()));
                 throw new RuntimeException(exception);

http://git-wip-us.apache.org/repos/asf/phoenix/blob/27b3865c/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixRecordReader.java
----------------------------------------------------------------------
diff --git a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixRecordReader.java b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixRecordReader.java
index 6d30c3f..5c6afb3 100644
--- a/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixRecordReader.java
+++ b/phoenix-pig/src/main/java/org/apache/phoenix/pig/hadoop/PhoenixRecordReader.java
@@ -36,6 +36,7 @@ import org.apache.phoenix.iterate.TableResultIterator;
 import org.apache.phoenix.jdbc.PhoenixResultSet;
 import org.apache.phoenix.pig.PhoenixPigConfiguration;
 import org.apache.phoenix.query.KeyRange;
+import org.apache.phoenix.util.ScanUtil;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Throwables;
@@ -96,10 +97,8 @@ public final class PhoenixRecordReader extends RecordReader<NullWritable,Phoenix
         final KeyRange keyRange = pSplit.getKeyRange();
         final Scan splitScan = queryPlan.getContext().getScan();
         final Scan scan = new Scan(splitScan);
-        scan.setStartRow(keyRange.getLowerRange());
-        scan.setStopRow(keyRange.getUpperRange());
-         try {
-            //this.resultIterator = queryPlan.iterator();
+        ScanUtil.intersectScanRange(scan, keyRange.getLowerRange(), keyRange.getUpperRange(), queryPlan.getContext().getScanRanges().useSkipScanFilter());
+        try {
             TableResultIterator tableResultIterator = new TableResultIterator(queryPlan.getContext(), queryPlan.getTableRef(),scan);
             if(queryPlan.getContext().getSequenceManager().getSequenceCount() > 0) {
                     this.resultIterator = new SequenceResultIterator(tableResultIterator, queryPlan.getContext().getSequenceManager());


Mime
View raw message