drill-commits mailing list archives

From: s..@apache.org
Subject: drill git commit: DRILL-1835: Fix sort and join when results include complex types
Date: Thu, 11 Dec 2014 01:21:10 GMT
Repository: drill
Updated Branches:
  refs/heads/0.7.0 c65928fed -> 80fc97dec


DRILL-1835: Fix sort and join when results include complex types


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/80fc97de
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/80fc97de
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/80fc97de

Branch: refs/heads/0.7.0
Commit: 80fc97dec6835e001c105e06ab545c730829cd41
Parents: c65928f
Author: Steven Phillips <sphillips@maprtech.com>
Authored: Tue Dec 9 23:01:27 2014 -0800
Committer: Steven Phillips <sphillips@maprtech.com>
Committed: Wed Dec 10 15:21:57 2014 -0800

----------------------------------------------------------------------
 .../exec/physical/impl/TopN/TopNBatch.java      |  12 +-
 .../exec/physical/impl/join/HashJoinBatch.java  |  17 +-
 .../exec/physical/impl/join/MergeJoinBatch.java |  13 +-
 .../physical/impl/xsort/ExternalSortBatch.java  |   6 +
 .../exec/store/parquet2/DrillParquetReader.java |  19 +--
 .../drill/TestTpchDistributedStreaming.java     | 154 +++++++++++++++++++
 .../exec/store/parquet/TestParquetComplex.java  |  58 +++++++
 .../store/parquet/complex/baseline_sorted.json  | 109 +++++++++++++
 8 files changed, 369 insertions(+), 19 deletions(-)
----------------------------------------------------------------------
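The common thread through the TopN, hash join, merge join, and external sort hunks below is one fix: when an output VectorContainer materializes a field via addOrGet() and the result is a complex (container) vector such as a map or repeated list, the new vector comes back without the child fields of the incoming data, which left sort and join outputs missing the inner structure of complex columns. Each operator now seeds the new vector by creating a transfer pair from the incoming vector, which carries the inner schema across, and then clears the transferred data. A minimal sketch of that pattern, assuming the Drill 0.7 APIs used in the hunks (VectorContainer.addOrGet, ValueVector.makeTransferPair, ValueVector.clear); the helper class and method names are illustrative, not part of the commit:

    import org.apache.drill.exec.record.VectorContainer;
    import org.apache.drill.exec.record.VectorWrapper;
    import org.apache.drill.exec.vector.ValueVector;
    import org.apache.drill.exec.vector.complex.AbstractContainerVector;

    class ComplexSchemaTransferSketch {
      // Hypothetical helper showing the pattern each operator applies inline.
      static void addOrGetWithChildSchema(VectorContainer out, VectorWrapper<?> incoming) {
        ValueVector v = out.addOrGet(incoming.getField());
        if (v instanceof AbstractContainerVector) {
          // addOrGet() returns an empty map/list vector; a transfer pair from the
          // incoming vector copies its child-field structure onto it.
          incoming.getValueVector().makeTransferPair(v);
          // Drop the transferred data; only the materialized schema is wanted here.
          v.clear();
        }
      }
    }
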


http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
index a67f835..9829fc6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/TopN/TopNBatch.java
@@ -50,6 +50,7 @@ import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.ExpandableHyperContainer;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.TransferPair;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.record.VectorContainer;
@@ -58,6 +59,7 @@ import org.apache.drill.exec.record.WritableBatch;
 import org.apache.drill.exec.record.selection.SelectionVector2;
 import org.apache.drill.exec.record.selection.SelectionVector4;
 import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.eigenbase.rel.RelFieldCollation.Direction;
 
 import com.google.common.base.Stopwatch;
@@ -132,11 +134,19 @@ public class TopNBatch extends AbstractRecordBatch<TopN> {
       case OK:
       case OK_NEW_SCHEMA:
         for (VectorWrapper w : incoming) {
-          c.addOrGet(w.getField());
+          ValueVector v = c.addOrGet(w.getField());
+          if (v instanceof AbstractContainerVector) {
+            w.getValueVector().makeTransferPair(v);
+            v.clear();
+          }
         }
         c = VectorContainer.canonicalize(c);
         for (VectorWrapper w : c) {
           ValueVector v = container.addOrGet(w.getField());
+          if (v instanceof AbstractContainerVector) {
+            w.getValueVector().makeTransferPair(v);
+            v.clear();
+          }
           v.allocateNew();
         }
         container.buildSchema(SelectionVectorMode.NONE);

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
index 7d2557e..7b3751b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/HashJoinBatch.java
@@ -49,9 +49,12 @@ import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.ExpandableHyperContainer;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.TransferPair;
 import org.apache.drill.exec.record.TypedFieldId;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.vector.ValueVector;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.eigenbase.rel.JoinRelType;
 
 import com.sun.codemodel.JExpr;
@@ -176,8 +179,12 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
     try {
       rightSchema = right.getSchema();
       VectorContainer c = new VectorContainer(oContext);
-      for (MaterializedField field : rightSchema) {
-        c.addOrGet(field);
+      for (VectorWrapper w : right) {
+        ValueVector v = c.addOrGet(w.getField());
+        if (v instanceof AbstractContainerVector) {
+          w.getValueVector().makeTransferPair(v);
+          v.clear();
+        }
       }
       c.buildSchema(SelectionVectorMode.NONE);
       c.setRecordCount(0);
@@ -451,7 +458,11 @@ public class HashJoinBatch extends AbstractRecordBatch<HashJoinPOP> {
                   outputType = inputType;
                 }
 
-                container.addOrGet(MaterializedField.create(vv.getField().getPath(), outputType));
+                ValueVector v = container.addOrGet(MaterializedField.create(vv.getField().getPath(), outputType));
+                if (v instanceof AbstractContainerVector) {
+                  vv.getValueVector().makeTransferPair(v);
+                  v.clear();
+                }
 
                 JVar inVV = g.declareVectorValueSetupAndMember("probeBatch", new TypedFieldId(inputType, false, fieldId));
                 JVar outVV = g.declareVectorValueSetupAndMember("outgoing", new TypedFieldId(outputType, false, outputFieldId));

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
index 87b12b1..d0f9d7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/join/MergeJoinBatch.java
@@ -54,6 +54,7 @@ import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.vector.AllocationHelper;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.allocator.VectorAllocator;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.eigenbase.rel.JoinRelType;
 
 import com.google.common.base.Preconditions;
@@ -440,7 +441,11 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
             outputType = inputType;
           }
           MaterializedField newField = MaterializedField.create(w.getField().getPath(), outputType);
-          container.addOrGet(newField);
+          ValueVector v = container.addOrGet(newField);
+          if (v instanceof AbstractContainerVector) {
+            w.getValueVector().makeTransferPair(v);
+            v.clear();
+          }
         }
       }
 
@@ -454,7 +459,11 @@ public class MergeJoinBatch extends AbstractRecordBatch<MergeJoinPOP> {
             outputType = inputType;
           }
           MaterializedField newField = MaterializedField.create(w.getField().getPath(), outputType);
-          container.addOrGet(newField);
+          ValueVector v = container.addOrGet(newField);
+          if (v instanceof AbstractContainerVector) {
+            w.getValueVector().makeTransferPair(v);
+            v.clear();
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
index f7fed46..a22ac98 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
@@ -52,6 +52,7 @@ import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;
 import org.apache.drill.exec.record.MaterializedField;
 import org.apache.drill.exec.record.RecordBatch;
+import org.apache.drill.exec.record.TransferPair;
 import org.apache.drill.exec.record.VectorAccessible;
 import org.apache.drill.exec.record.VectorContainer;
 import org.apache.drill.exec.record.VectorWrapper;
@@ -62,6 +63,7 @@ import org.apache.drill.exec.util.Utilities;
 import org.apache.drill.exec.vector.CopyUtil;
 import org.apache.drill.exec.vector.ValueVector;
 import org.apache.drill.exec.vector.allocator.VectorAllocator;
+import org.apache.drill.exec.vector.complex.AbstractContainerVector;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.eigenbase.rel.RelFieldCollation.Direction;
@@ -201,6 +203,10 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
       case OK_NEW_SCHEMA:
         for (VectorWrapper w : incoming) {
           ValueVector v = container.addOrGet(w.getField());
+          if (v instanceof AbstractContainerVector) {
+            w.getValueVector().makeTransferPair(v);
+            v.clear();
+          }
           v.allocateNew();
         }
         container.buildSchema(SelectionVectorMode.NONE);

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
index cadd8cb..fdf9e14 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet2/DrillParquetReader.java
@@ -157,27 +157,20 @@ public class DrillParquetReader extends AbstractRecordReader {
       schemaPaths.add(schemaPath);
     }
 
-    // loop through columns in parquet schema and add columns that are included in project list
-    outer: for (SchemaPath schemaPath : schemaPaths) {
-      for (SchemaPath columnPath : columns) {
-        if (columnPath.contains(schemaPath)) {
-          selectedSchemaPaths.add(schemaPath);
-          continue outer;
-        }
-      }
-    }
-
     // loop through projection columns and add any columns that are missing from parquet schema to columnsNotFound list
     outer: for (SchemaPath columnPath : modifiedColumns) {
+      boolean notFound = true;
       for (SchemaPath schemaPath : schemaPaths) {
         if (schemaPath.contains(columnPath)) {
-          continue outer;
+          selectedSchemaPaths.add(schemaPath);
+          notFound = false;
         }
       }
-      columnsNotFound.add(columnPath);
+      if (notFound) {
+        columnsNotFound.add(columnPath);
+      }
     }
 
-
     // convert SchemaPaths from selectedSchemaPaths and convert to parquet type, and merge into projection schema
     for (SchemaPath schemaPath : selectedSchemaPaths) {
       List<String> segments = Lists.newArrayList();

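Besides the joins and sorts, the Parquet reader's projection handling is reworked above: the two passes (one over the Parquet schema paths, one over the projected columns) are folded into a single pass over the projected columns, so each projected column either selects every schema path it matches or is recorded in columnsNotFound for the reader to handle separately. A self-contained simplification of that pass, using plain strings and a "shorter path is a prefix of the longer one" check as a stand-in for SchemaPath.contains(), whose exact semantics are Drill-internal; all names and sample paths here are illustrative:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Set;

    class ProjectionPassSketch {
      // Loose stand-in for SchemaPath.contains(): true when the two dotted paths
      // agree up to the length of the shorter one.
      static boolean contains(String a, String b) {
        return a.equals(b) || a.startsWith(b + ".") || b.startsWith(a + ".");
      }

      public static void main(String[] args) {
        List<String> schemaPaths = Arrays.asList(
            "amount", "marketing_info.camp_id", "marketing_info.keywords");
        List<String> projected = Arrays.asList("marketing_info", "bogus_column");

        Set<String> selected = new LinkedHashSet<>();
        List<String> columnsNotFound = new ArrayList<>();

        // Single pass: each projected column either selects the schema paths it
        // matches or, failing that, is flagged as not found.
        for (String columnPath : projected) {
          boolean notFound = true;
          for (String schemaPath : schemaPaths) {
            if (contains(schemaPath, columnPath)) {
              selected.add(schemaPath);
              notFound = false;
            }
          }
          if (notFound) {
            columnsNotFound.add(columnPath);
          }
        }

        System.out.println(selected);        // [marketing_info.camp_id, marketing_info.keywords]
        System.out.println(columnsNotFound); // [bogus_column]
      }
    }
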
http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
new file mode 100644
index 0000000..4a8d7c9
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class TestTpchDistributedStreaming extends BaseTestQuery{
+  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchDistributedStreaming.class);
+
+  private void testDistributed(String fileName) throws Exception{
+    String query = getFile(fileName);
+    test("alter session set `planner.slice_target` = 10; alter session set `planner.enable_hashjoin` = false; " +
+            "alter session set `planner.enable_hashagg` = false; " + query);
+  }
+
+  @Test
+  public void tpch01() throws Exception{
+    testDistributed("queries/tpch/01.sql");
+  }
+
+  @Test
+  @Ignore // DRILL-512
+  public void tpch02() throws Exception{
+    testDistributed("queries/tpch/02.sql");
+  }
+
+  @Test
+  public void tpch03() throws Exception{
+    testDistributed("queries/tpch/03.sql");
+  }
+
+  @Test
+  public void tpch04() throws Exception{
+    testDistributed("queries/tpch/04.sql");
+  }
+
+  @Test
+  public void tpch05() throws Exception{
+    testDistributed("queries/tpch/05.sql");
+  }
+
+  @Test
+  public void tpch06() throws Exception{
+    testDistributed("queries/tpch/06.sql");
+  }
+
+  @Test
+  public void tpch07() throws Exception{
+    testDistributed("queries/tpch/07.sql");
+  }
+
+  @Test
+  public void tpch08() throws Exception{
+    testDistributed("queries/tpch/08.sql");
+  }
+
+  @Test
+  public void tpch09() throws Exception{
+    testDistributed("queries/tpch/09.sql");
+  }
+
+  @Test
+  public void tpch10() throws Exception{
+    testDistributed("queries/tpch/10.sql");
+  }
+
+  @Test
+  @Ignore // cartesian problem
+  public void tpch11() throws Exception{
+    testDistributed("queries/tpch/11.sql");
+  }
+
+  @Test
+  public void tpch12() throws Exception{
+    testDistributed("queries/tpch/12.sql");
+  }
+
+  @Test
+  public void tpch13() throws Exception{
+    testDistributed("queries/tpch/13.sql");
+  }
+
+  @Test
+  public void tpch14() throws Exception{
+    testDistributed("queries/tpch/14.sql");
+  }
+
+  @Test
+  @Ignore // non-equality join
+  public void tpch15() throws Exception{
+    testDistributed("queries/tpch/15.sql");
+  }
+
+  @Test
+  public void tpch16() throws Exception{
+    testDistributed("queries/tpch/16.sql");
+  }
+
+  @Test
+  @Ignore // non-equality join
+  public void tpch17() throws Exception{
+    testDistributed("queries/tpch/17.sql");
+  }
+
+  @Test
+  public void tpch18() throws Exception{
+    testDistributed("queries/tpch/18.sql");
+  }
+
+  @Test
+  @Ignore // non-equality join
+  public void tpch19() throws Exception{
+    testDistributed("queries/tpch/19.sql");
+  }
+
+  @Test
+  public void tpch19_1() throws Exception{
+    testDistributed("queries/tpch/19_1.sql");
+  }
+
+  @Test
+  public void tpch20() throws Exception{
+    testDistributed("queries/tpch/20.sql");
+  }
+
+  @Test
+  @Ignore
+  public void tpch21() throws Exception{
+    testDistributed("queries/tpch/21.sql");
+  }
+
+  @Test
+  @Ignore // DRILL-518
+  public void tpch22() throws Exception{
+    testDistributed("queries/tpch/22.sql");
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
index 892d453..8405d0e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestParquetComplex.java
@@ -25,6 +25,64 @@ public class TestParquetComplex extends BaseTestQuery {
   private static final String DATAFILE = "cp.`store/parquet/complex/complex.parquet`";
 
   @Test
+  public void sort() throws Exception {
+    String query = String.format("select * from %s order by amount", DATAFILE);
+    testBuilder()
+            .sqlQuery(query)
+            .ordered()
+            .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
+            .build()
+            .run();
+  }
+
+  @Test
+  public void topN() throws Exception {
+    String query = String.format("select * from %s order by amount limit 5", DATAFILE);
+    testBuilder()
+            .sqlQuery(query)
+            .ordered()
+            .jsonBaselineFile("store/parquet/complex/baseline_sorted.json")
+            .build()
+            .run();
+  }
+
+  @Test
+  public void hashJoin() throws Exception{
+    String query = String.format("select t1.amount, t1.`date`, t1.marketing_info, t1.`time`, t1.trans_id, t1.trans_info, t1.user_info " +
+            "from %s t1, %s t2 where t1.amount = t2.amount", DATAFILE, DATAFILE);
+    testBuilder()
+            .sqlQuery(query)
+            .unOrdered()
+            .jsonBaselineFile("store/parquet/complex/baseline.json")
+            .build()
+            .run();
+  }
+
+  @Test
+  public void mergeJoin() throws Exception{
+    test("alter session set `planner.enable_hashjoin` = false");
+    String query = String.format("select t1.amount, t1.`date`, t1.marketing_info, t1.`time`, t1.trans_id, t1.trans_info, t1.user_info " +
+            "from %s t1, %s t2 where t1.amount = t2.amount", DATAFILE, DATAFILE);
+    testBuilder()
+            .sqlQuery(query)
+            .unOrdered()
+            .jsonBaselineFile("store/parquet/complex/baseline.json")
+            .build()
+            .run();
+  }
+
+  @Test
+  public void selectAllColumns() throws Exception {
+    String query = String.format("select amount, `date`, marketing_info, `time`, trans_id, trans_info, user_info from %s", DATAFILE);
+    testBuilder()
+            .sqlQuery(query)
+            .ordered()
+            .jsonBaselineFile("store/parquet/complex/baseline.json")
+            .build()
+            .run();
+  }
+
+  @Test
   public void selectMap() throws Exception {
     String query = "select marketing_info from cp.`store/parquet/complex/complex.parquet`";
     testBuilder()

http://git-wip-us.apache.org/repos/asf/drill/blob/80fc97de/exec/java-exec/src/test/resources/store/parquet/complex/baseline_sorted.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/parquet/complex/baseline_sorted.json b/exec/java-exec/src/test/resources/store/parquet/complex/baseline_sorted.json
new file mode 100644
index 0000000..f800514
--- /dev/null
+++ b/exec/java-exec/src/test/resources/store/parquet/complex/baseline_sorted.json
@@ -0,0 +1,109 @@
+{ "amount" : 20.25,
+    "date" : "2013-06-09",
+    "marketing_info" : { "camp_id" : 17,
+        "keywords" : [  ]
+    },
+    "time" : "15:31:45",
+    "trans_id" : 2,
+    "trans_info" : { "prod_id" : [ 293,
+        90
+    ],
+        "purch_flag" : "true"
+    },
+    "user_info" : { "cust_id" : 11,
+        "device" : "IOS5",
+        "state" : "la"
+    }
+}
+{ "amount" : 34.200000000000003,
+"date" : "2013-07-21",
+"marketing_info" : { "camp_id" : 8,
+"keywords" : [ "fallout" ]
+},
+"time" : "08:01:13",
+"trans_id" : 4,
+"trans_info" : { "prod_id" : [ 311,
+29,
+5,
+41
+],
+"purch_flag" : "false"
+},
+"user_info" : { "cust_id" : 999,
+"device" : "IOS7",
+"state" : "ct"
+}
+}
+{ "amount" : 80.5,
+    "date" : "2013-07-26",
+    "marketing_info" : { "camp_id" : 4,
+        "keywords" : [ "go",
+            "to",
+            "thing",
+            "watch",
+            "made",
+            "laughing",
+            "might",
+            "pay",
+            "in",
+            "your",
+            "hold"
+        ]
+    },
+    "time" : "04:56:59",
+    "trans_id" : 0,
+    "trans_info" : { "prod_id" : [ 16 ],
+        "purch_flag" : "false"
+    },
+    "user_info" : { "cust_id" : 28,
+        "device" : "IOS5",
+        "state" : "mt"
+    }
+}
+{ "amount" : 100.40000000000001,
+  "date" : "2013-05-16",
+  "marketing_info" : { "camp_id" : 6,
+      "keywords" : [ "pronounce",
+          "tree",
+          "instead",
+          "games",
+          "sigh"
+        ]
+    },
+  "time" : "07:31:54",
+  "trans_id" : 1,
+  "trans_info" : { "prod_id" : [  ],
+      "purch_flag" : "false"
+    },
+  "user_info" : { "cust_id" : 86623,
+      "device" : "AOS4.2",
+      "state" : "mi"
+    }
+}
+{ "amount" : 500.75,
+  "date" : "2013-07-19",
+  "marketing_info" : { "camp_id" : 17,
+      "keywords" : [ "it's" ]
+    },
+  "time" : "11:24:22",
+  "trans_id" : 3,
+  "trans_info" : { "prod_id" : [ 173,
+          18,
+          121,
+          84,
+          115,
+          226,
+          464,
+          525,
+          35,
+          11,
+          94,
+          45
+        ],
+      "purch_flag" : "false"
+    },
+  "user_info" : { "cust_id" : 666,
+      "device" : "IOS5",
+      "state" : "nj"
+    }
+}

