incubator-hcatalog-commits mailing list archives

From ga...@apache.org
Subject svn commit: r1293495 [2/2] - in /incubator/hcatalog/trunk: ./ src/java/org/apache/hcatalog/common/ src/java/org/apache/hcatalog/data/ src/java/org/apache/hcatalog/mapreduce/ src/java/org/apache/hcatalog/pig/ src/test/org/apache/hcatalog/data/ src/test/...
Date Sat, 25 Feb 2012 00:45:14 GMT
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/mapreduce/PartInfo.java Sat Feb 25 00:45:13 2012
@@ -21,7 +21,11 @@ import java.io.Serializable;
 import java.util.Map;
 import java.util.Properties;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.ql.plan.TableDesc;
+
 import org.apache.hcatalog.data.schema.HCatSchema;
+import org.apache.hcatalog.mapreduce.HCatStorageHandler;
 
 /** The Class used to serialize the partition information read from the metadata server that maps to a partition */
 public class PartInfo implements Serializable {
@@ -32,8 +36,8 @@ public class PartInfo implements Seriali
   /** The partition schema. */
   private final HCatSchema partitionSchema;
 
-  /** The information about which input storage driver to use */
-  private final String inputStorageDriverClass;
+  /** The information about which input storage handler to use */
+  private final HCatStorageHandler storageHandler;
 
   /** HCat-specific properties set at the partition */
   private final Properties hcatProperties;
@@ -44,18 +48,28 @@ public class PartInfo implements Seriali
   /** The map of partition key names and their values. */
   private Map<String,String> partitionValues;
 
+  /** Job properties associated with this partition */
+  Map<String,String> jobProperties;
+
+  /** the table info associated with this partition */
+  HCatTableInfo tableInfo;
+
   /**
    * Instantiates a new hcat partition info.
    * @param partitionSchema the partition schema
-   * @param inputStorageDriverClass the input storage driver class name
+   * @param storageHandler the storage handler
    * @param location the location
    * @param hcatProperties hcat-specific properties at the partition
    */
-  public PartInfo(HCatSchema partitionSchema, String inputStorageDriverClass, String location, Properties hcatProperties){
+  public PartInfo(HCatSchema partitionSchema, HCatStorageHandler storageHandler,
+                  String location, Properties hcatProperties, 
+                  Map<String,String> jobProperties, HCatTableInfo tableInfo){
     this.partitionSchema = partitionSchema;
-    this.inputStorageDriverClass = inputStorageDriverClass;
+    this.storageHandler = storageHandler;
     this.location = location;
     this.hcatProperties = hcatProperties;
+    this.jobProperties = jobProperties;
+    this.tableInfo = tableInfo;
   }
 
   /**
@@ -71,8 +85,8 @@ public class PartInfo implements Seriali
    * Gets the value of input storage driver class name.
    * @return the input storage driver class name
    */
-  public String getInputStorageDriverClass() {
-    return inputStorageDriverClass;
+  public HCatStorageHandler getStorageHandler() {
+    return storageHandler;
   }
 
 
@@ -80,7 +94,7 @@ public class PartInfo implements Seriali
    * Gets the value of hcatProperties.
    * @return the hcatProperties
    */
-  public Properties getInputStorageDriverProperties() {
+  public Properties getInputStorageHandlerProperties() {
     return hcatProperties;
   }
 
@@ -107,4 +121,12 @@ public class PartInfo implements Seriali
   public Map<String,String> getPartitionValues() {
     return partitionValues;
   }
+
+  public Map<String,String> getJobProperties() {
+    return jobProperties;
+  }
+
+  public HCatTableInfo getTableInfo() {
+    return tableInfo;
+  }
 }
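
With this change a PartInfo carries the resolved storage handler, per-partition job properties, and the owning table info in place of the old input storage driver class name. A minimal caller sketch (illustrative only, not part of this commit; the class and method names below are hypothetical) might look like:

import java.util.Map;
import java.util.Properties;

import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.hcatalog.mapreduce.HCatStorageHandler;
import org.apache.hcatalog.mapreduce.HCatTableInfo;
import org.apache.hcatalog.mapreduce.PartInfo;

public class PartInfoUsageSketch {
  // Hypothetical helper: wraps already-resolved partition metadata in a PartInfo
  // using the new six-argument constructor.
  static PartInfo describePartition(HCatSchema partitionSchema,
                                    HCatStorageHandler storageHandler,
                                    String location,
                                    Properties hcatProperties,
                                    Map<String, String> jobProperties,
                                    HCatTableInfo tableInfo) {
    PartInfo partInfo = new PartInfo(partitionSchema, storageHandler,
        location, hcatProperties, jobProperties, tableInfo);
    // The handler, job properties and table info now travel with the partition
    // itself, replacing the old getInputStorageDriverClass() lookup.
    assert partInfo.getStorageHandler() == storageHandler;
    assert partInfo.getJobProperties() == jobProperties;
    assert partInfo.getTableInfo() == tableInfo;
    return partInfo;
  }
}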

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/pig/HCatLoader.java Sat Feb 25 00:45:13 2012
@@ -88,11 +88,12 @@ public class HCatLoader extends HCatBase
     // the Configuration
     if (!HCatUtil.checkJobContextIfRunningFromBackend(job)){
       HCatInputFormat.setInput(job,
-                                            InputJobInfo.create(dbName,
-                                                                         tableName,
-                                                                         getPartitionFilterString(),
-                                                                         hcatServerUri != null ? hcatServerUri : (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
-                                                                         PigHCatUtil.getHCatServerPrincipal(job)));
+                               InputJobInfo.create(dbName,
+                                                   tableName,
+                                                   getPartitionFilterString(),
+                 hcatServerUri != null ? hcatServerUri : 
+                  (hcatServerUri = PigHCatUtil.getHCatServerUri(job)),
+                                    PigHCatUtil.getHCatServerPrincipal(job)));
     }
 
     // Need to also push projections by calling setOutputSchema on

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestHCatRecordSerDe.java Sat Feb 25 00:45:13 2012
@@ -153,10 +153,12 @@ public class TestHCatRecordSerDe extends
       System.out.println("FOUR:"+s4.toString());
 
       // Test LazyHCatRecord init and read
-      LazyHCatRecord s5 = new LazyHCatRecord(o3,testSD.getObjectInspector());
+      LazyHCatRecord s5 = new LazyHCatRecord(o3,testSD.getObjectInspector(),
+              new HashMap<Integer, Object>());
       System.out.println("FIVE:"+s5.toString());
 
-      LazyHCatRecord s6 = new LazyHCatRecord(s4,hrsd.getObjectInspector());
+      LazyHCatRecord s6 = new LazyHCatRecord(s4,hrsd.getObjectInspector(),
+              new HashMap<Integer, Object>());
       System.out.println("SIX:"+s6.toString());
 
     }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/data/TestLazyHCatRecord.java Sat Feb 25 00:45:13 2012
@@ -53,32 +53,33 @@ public class TestLazyHCatRecord extends 
   private final long LONG_CONST = 5000000000L;
   private final double DOUBLE_CONST = 3.141592654;
   private final String STRING_CONST = "hello world";
+  private final String PART_CONST = "20120221";
 
 
   public void testGet() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
     assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
     assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
     assertEquals(STRING_CONST, (String)r.get(3));
   }
 
-  // TODO This test fails, but it seems to be an error in the schema, not in
-  // LazyHCatRecord.  It get's an NPE inside getPosition.
-  /*
   public void testGetWithName() throws Exception {
     TypeInfo ti = getTypeInfo();
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(ti));
-    HCatSchema schema = HCatSchemaUtils.getHCatSchema(ti);
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(ti),
+    new HashMap<Integer, Object>());
+    HCatSchema schema = HCatSchemaUtils.getHCatSchema(ti)
+                                          .get(0).getStructSubSchema();
     assertEquals(INT_CONST, ((Integer)r.get("an_int", schema)).intValue());
     assertEquals(LONG_CONST, ((Long)r.get("a_long", schema)).longValue());
     assertEquals(DOUBLE_CONST, ((Double)r.get("a_double", schema)).doubleValue());
     assertEquals(STRING_CONST, (String)r.get("a_string", schema));
   }
-  */
 
   public void testGetAll() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     List<Object> list = r.getAll();
     assertEquals(INT_CONST, ((Integer)list.get(0)).intValue());
     assertEquals(LONG_CONST, ((Long)list.get(1)).longValue());
@@ -87,7 +88,8 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testSet() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.set(3, "Mary had a little lamb");
@@ -98,12 +100,14 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testSize() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     assertEquals(4, r.size());
   }
 
   public void testReadFields() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.readFields(null);
@@ -114,7 +118,8 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testWrite() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.write(null);
@@ -125,7 +130,8 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testSetWithName() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.set("fred", null, "bob");
@@ -136,7 +142,8 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testRemove() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.remove(0);
@@ -147,7 +154,8 @@ public class TestLazyHCatRecord extends 
   }
 
   public void testCopy() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector());
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>());
     boolean sawException = false;
     try {
       r.copy(null);
@@ -157,6 +165,66 @@ public class TestLazyHCatRecord extends 
     assertTrue(sawException);
   }
 
+  public void testGetWritable() throws Exception {
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        new HashMap<Integer, Object>()).getWritable();
+    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
+    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
+    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
+    assertEquals(STRING_CONST, (String)r.get(3));
+    assertEquals("org.apache.hcatalog.data.DefaultHCatRecord", r.getClass().getName());
+  }
+
+  public void testGetPartitioned() throws Exception {
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        getPartCols());
+    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
+    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
+    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
+    assertEquals(STRING_CONST, (String)r.get(3));
+    assertEquals(PART_CONST, (String)r.get(4));
+  }
+
+  public void testGetWithNamePartitioned() throws Exception {
+    TypeInfo ti = getTypeInfo();
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(ti),
+        getPartCols());
+    HCatSchema schema = HCatSchemaUtils.getHCatSchema(ti)
+                                          .get(0).getStructSubSchema();
+    assertEquals(INT_CONST, ((Integer)r.get("an_int", schema)).intValue());
+    assertEquals(LONG_CONST, ((Long)r.get("a_long", schema)).longValue());
+    assertEquals(DOUBLE_CONST, ((Double)r.get("a_double", schema)).doubleValue());
+    assertEquals(STRING_CONST, (String)r.get("a_string", schema));
+  }
+
+  public void testGetAllPartitioned() throws Exception {
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        getPartCols());
+    List<Object> list = r.getAll();
+    assertEquals(INT_CONST, ((Integer)list.get(0)).intValue());
+    assertEquals(LONG_CONST, ((Long)list.get(1)).longValue());
+    assertEquals(DOUBLE_CONST, ((Double)list.get(2)).doubleValue());
+    assertEquals(STRING_CONST, (String)list.get(3));
+    assertEquals(PART_CONST, (String)r.get(4));
+  }
+
+  public void testSizePartitioned() throws Exception {
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        getPartCols());
+    assertEquals(5, r.size());
+  }
+
+  public void testGetWritablePartitioned() throws Exception {
+    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector(),
+        getPartCols()).getWritable();
+    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
+    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
+    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
+    assertEquals(STRING_CONST, (String)r.get(3));
+    assertEquals(PART_CONST, (String)r.get(4));
+    assertEquals("org.apache.hcatalog.data.DefaultHCatRecord", r.getClass().getName());
+  }
+
   private HCatRecord getHCatRecord() throws Exception {
 
     List<Object> rec_1 = new ArrayList<Object>(4);
@@ -185,22 +253,19 @@ public class TestLazyHCatRecord extends 
 
   }
 
-  public void testGetWritable() throws Exception {
-    HCatRecord r = new LazyHCatRecord(getHCatRecord(), getObjectInspector()).getWritable();
-    assertEquals(INT_CONST, ((Integer)r.get(0)).intValue());
-    assertEquals(LONG_CONST, ((Long)r.get(1)).longValue());
-    assertEquals(DOUBLE_CONST, ((Double)r.get(2)).doubleValue());
-    assertEquals(STRING_CONST, (String)r.get(3));
-    assertEquals("org.apache.hcatalog.data.DefaultHCatRecord", r.getClass().getName());
-  }
-
-
   private ObjectInspector getObjectInspector(TypeInfo ti) throws Exception {
-    return HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector((StructTypeInfo)ti);
+    return HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(
+        (StructTypeInfo)ti);
   }
 
   private ObjectInspector getObjectInspector() throws Exception {
-    return
-      HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector((StructTypeInfo)getTypeInfo());
+    return HCatRecordObjectInspectorFactory.getHCatRecordObjectInspector(
+          (StructTypeInfo)getTypeInfo());
+  }
+
+  private Map<Integer, Object> getPartCols() {
+    Map<Integer, Object> pc = new HashMap<Integer, Object>(1);
+    pc.put(4, PART_CONST);
+    return pc;
   }
 }
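
The new partition-aware tests above exercise the extra Map<Integer, Object> argument to LazyHCatRecord: it maps a field position to a partition value that is exposed as a virtual column on top of the wrapped object. A small standalone sketch of that usage (illustrative only, not part of this commit; class and method names are hypothetical), under the same assumptions the tests make:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

import org.apache.hcatalog.data.HCatRecord;
import org.apache.hcatalog.data.LazyHCatRecord;

public class LazyHCatRecordPartitionSketch {
  // Hypothetical usage: exposes a partition value ("20120221") as a virtual
  // column at position 4, after the columns backed by the wrapped object.
  static HCatRecord wrapWithPartitionColumn(Object wrapped, ObjectInspector oi)
      throws Exception {
    Map<Integer, Object> partCols = new HashMap<Integer, Object>(1);
    partCols.put(4, "20120221");
    HCatRecord r = new LazyHCatRecord(wrapped, oi, partCols);
    // As in testSizePartitioned/testGetPartitioned above: r.size() includes the
    // partition column, r.get(4) returns "20120221", and getWritable() copies
    // the whole record into a DefaultHCatRecord.
    return r;
  }
}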

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatHiveCompatibility.java Sat Feb 25 00:45:13 2012
@@ -91,7 +91,6 @@ public class TestHCatHiveCompatibility e
 
     // assert that the table created has no hcat instrumentation, and that we're still able to read it.
     Table table = client.getTable("default", "junit_unparted_noisd");
-    assertFalse(table.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
     assertTrue(table.getSd().getInputFormat().equals(HCatConstants.HIVE_RCFILE_IF_CLASS));
 
     PigServer server = new PigServer(ExecType.LOCAL, props);
@@ -115,7 +114,6 @@ public class TestHCatHiveCompatibility e
 
     // assert that the table created still has no hcat instrumentation
     Table table2 = client.getTable("default", "junit_unparted_noisd");
-    assertFalse(table2.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
     assertTrue(table2.getSd().getInputFormat().equals(HCatConstants.HIVE_RCFILE_IF_CLASS));
 
     driver.run("drop table junit_unparted_noisd");
@@ -133,7 +131,6 @@ public class TestHCatHiveCompatibility e
     // assert that the table created has no hcat instrumentation, and that we're still able to read it.
     Table table = client.getTable("default", "junit_parted_noisd");
 
-    assertFalse(table.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
     assertTrue(table.getSd().getInputFormat().equals(HCatConstants.HIVE_RCFILE_IF_CLASS));
 
     PigServer server = new PigServer(ExecType.LOCAL, props);
@@ -158,14 +155,12 @@ public class TestHCatHiveCompatibility e
 
     // assert that the table created still has no hcat instrumentation
     Table table2 = client.getTable("default", "junit_parted_noisd");
-    assertFalse(table2.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
     assertTrue(table2.getSd().getInputFormat().equals(HCatConstants.HIVE_RCFILE_IF_CLASS));
 
     // assert that there is one partition present, and it had hcat instrumentation inserted when it was created.
     Partition ptn = client.getPartition("default", "junit_parted_noisd", Arrays.asList("42"));
 
     assertNotNull(ptn);
-    assertTrue(ptn.getParameters().containsKey(HCatConstants.HCAT_ISD_CLASS));
     assertTrue(ptn.getSd().getInputFormat().equals(HCatConstants.HIVE_RCFILE_IF_CLASS));
     driver.run("drop table junit_unparted_noisd");
   }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/mapreduce/TestHCatOutputFormat.java Sat Feb 25 00:45:13 2012
@@ -161,8 +161,7 @@ public class TestHCatOutputFormat extend
     Partition part = client.getPartition(dbName, tblName, Arrays.asList("p1"));
     assertNotNull(part);
 
-    StorerInfo storer = InitializeInput.extractStorerInfo(part.getSd(),part.getParameters());
-    assertEquals(storer.getInputSDClass(), "testInputClass");
+    StorerInfo storer = InternalUtil.extractStorerInfo(part.getSd(),part.getParameters());
     assertEquals(storer.getProperties().get("hcat.testarg"), "testArgValue");
     assertTrue(part.getSd().getLocation().indexOf("p1") != -1);
   }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java?rev=1293495&r1=1293494&r2=1293495&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/pig/TestHCatLoaderComplexSchema.java Sat Feb 25 00:45:13 2012
@@ -178,7 +178,6 @@ public class TestHCatLoaderComplexSchema
   private void verifyWriteRead(String tablename, String pigSchema, String tableSchema, List<Tuple> data, boolean provideSchemaToStorer)
       throws IOException, CommandNeedRetryException, ExecException, FrontendException {
     MockLoader.setData(tablename+"Input", data);
-
     try {
       createTable(tablename, tableSchema);
       PigServer server = new PigServer(ExecType.LOCAL, props);
@@ -188,13 +187,14 @@ public class TestHCatLoaderComplexSchema
       server.registerQuery("STORE A into '"+tablename+"' using org.apache.hcatalog.pig.HCatStorer("
           + (provideSchemaToStorer ? "'', '"+pigSchema+"'" : "")
           + ");");
+      
       ExecJob execJob = server.executeBatch().get(0);
       if (!execJob.getStatistics().isSuccessful()) {
         throw new RuntimeException("Import failed", execJob.getException());
       }
-
       // test that schema was loaded correctly
       server.registerQuery("X = load '"+tablename+"' using org.apache.hcatalog.pig.HCatLoader();");
+      server.dumpSchema("X");
       Iterator<Tuple> it = server.openIterator("X");
       int i = 0;
       while (it.hasNext()) {
@@ -281,8 +281,8 @@ public class TestHCatLoaderComplexSchema
 
       data.add(t);
     }
-    verifyWriteRead("testMapWithComplexData", pigSchema, tableSchema, data, true);
-    verifyWriteRead("testMapWithComplexData2", pigSchema, tableSchema, data, false);
+  verifyWriteRead("testMapWithComplexData", pigSchema, tableSchema, data, true);
+  verifyWriteRead("testMapWithComplexData2", pigSchema, tableSchema, data, false);
 
   }
  }


