incubator-hcatalog-commits mailing list archives

From: tra...@apache.org
Subject: svn commit: r1383152 [14/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ ...
Date: Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderMaster.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderMaster.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderMaster.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderMaster.java Mon Sep 10 23:28:55 2012
@@ -37,34 +37,34 @@ import org.apache.hcatalog.data.transfer
 
 public class DataReaderMaster {
 
-	public static void main(String[] args) throws FileNotFoundException, IOException {
+    public static void main(String[] args) throws FileNotFoundException, IOException {
 
-		// This config contains all the configuration that master node wants to provide
-		// to the HCatalog.
-		Properties externalConfigs = new Properties();
-		externalConfigs.load(new FileReader(args[0]));
-		Map<String,String> config = new HashMap<String, String>();
-		
-		for (Entry<Object, Object> kv : externalConfigs.entrySet()){
-			config.put((String)kv.getKey(), (String)kv.getValue());
-		}
-		
-		// This piece of code runs in master node and gets necessary context.
-		ReaderContext context = runsInMaster(config);
-
-		ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(args[1])));
-		oos.writeObject(context);
-		oos.flush();
-		oos.close();
-		// Master node will serialize readercontext and will make it available  at slaves.
-	}
-
-	private static ReaderContext runsInMaster(Map<String,String> config) throws HCatException {
-
-		ReadEntity.Builder builder = new ReadEntity.Builder();
-		ReadEntity entity = builder.withTable(config.get("table")).build();
-		HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
-		ReaderContext cntxt = reader.prepareRead();
-		return cntxt;
-	}
+        // This config contains all the configuration that master node wants to provide
+        // to the HCatalog.
+        Properties externalConfigs = new Properties();
+        externalConfigs.load(new FileReader(args[0]));
+        Map<String, String> config = new HashMap<String, String>();
+
+        for (Entry<Object, Object> kv : externalConfigs.entrySet()) {
+            config.put((String) kv.getKey(), (String) kv.getValue());
+        }
+
+        // This piece of code runs in master node and gets necessary context.
+        ReaderContext context = runsInMaster(config);
+
+        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(new File(args[1])));
+        oos.writeObject(context);
+        oos.flush();
+        oos.close();
+        // Master node will serialize readercontext and will make it available  at slaves.
+    }
+
+    private static ReaderContext runsInMaster(Map<String, String> config) throws HCatException {
+
+        ReadEntity.Builder builder = new ReadEntity.Builder();
+        ReadEntity entity = builder.withTable(config.get("table")).build();
+        HCatReader reader = DataTransferFactory.getHCatReader(entity, config);
+        ReaderContext cntxt = reader.prepareRead();
+        return cntxt;
+    }
 }
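
For context, DataReaderMaster loads the file named by its first argument as a java.util.Properties file, forwards every entry to HCatalog as configuration, and only consumes the "table" key itself. A minimal sketch of producing such a file (the file name and table name below are assumptions, not part of the test):

    import java.io.FileWriter;
    import java.util.Properties;

    public class WriteReaderProps {
        public static void main(String[] args) throws Exception {
            // Only the "table" key is read by DataReaderMaster itself; any other
            // entries are passed through to HCatalog unchanged.
            Properties p = new Properties();
            p.setProperty("table", "studenttab10k");   // assumed table name
            FileWriter out = new FileWriter("reader.properties");
            p.store(out, "input for DataReaderMaster");
            out.close();
        }
    }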

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderSlave.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderSlave.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderSlave.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataReaderSlave.java Mon Sep 10 23:28:55 2012
@@ -34,28 +34,28 @@ import org.apache.hcatalog.data.transfer
 
 public class DataReaderSlave {
 
-	public static void main(String[] args) throws IOException, ClassNotFoundException {
-		
-		ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[0])));
-		ReaderContext cntxt = (ReaderContext) ois.readObject();
-		ois.close();
-		
-		String[] inpSlitsToRead = args[1].split(",");
-		List<InputSplit> splits = cntxt.getSplits();
-		
-		for (int i = 0; i < inpSlitsToRead.length; i++){
-			InputSplit split = splits.get(Integer.parseInt(inpSlitsToRead[i]));
-			HCatReader reader = DataTransferFactory.getHCatReader(split, cntxt.getConf());
-			Iterator<HCatRecord> itr = reader.read();
-			File f = new File(args[2]+"-"+i);
-			f.delete();
-			BufferedWriter outFile = new BufferedWriter(new FileWriter(f)); 
-			while(itr.hasNext()){
-				String rec = itr.next().toString().replaceFirst("\\s+$", "");
-				System.err.println(rec);
-				outFile.write(rec+"\n");
-			}
-			outFile.close();
-		}
-	}
+    public static void main(String[] args) throws IOException, ClassNotFoundException {
+
+        ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[0])));
+        ReaderContext cntxt = (ReaderContext) ois.readObject();
+        ois.close();
+
+        String[] inpSlitsToRead = args[1].split(",");
+        List<InputSplit> splits = cntxt.getSplits();
+
+        for (int i = 0; i < inpSlitsToRead.length; i++) {
+            InputSplit split = splits.get(Integer.parseInt(inpSlitsToRead[i]));
+            HCatReader reader = DataTransferFactory.getHCatReader(split, cntxt.getConf());
+            Iterator<HCatRecord> itr = reader.read();
+            File f = new File(args[2] + "-" + i);
+            f.delete();
+            BufferedWriter outFile = new BufferedWriter(new FileWriter(f));
+            while (itr.hasNext()) {
+                String rec = itr.next().toString().replaceFirst("\\s+$", "");
+                System.err.println(rec);
+                outFile.write(rec + "\n");
+            }
+            outFile.close();
+        }
+    }
 }
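
Taken together, DataReaderMaster and DataReaderSlave split HCatalog's reader-side data transfer API across two processes, with the ReaderContext serialized to a file in between. A condensed single-JVM sketch of the same flow, using only the calls visible above; the table name is an assumption and package names are taken to match the imports of the classes in this commit:

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    import org.apache.hadoop.mapreduce.InputSplit;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.data.transfer.DataTransferFactory;
    import org.apache.hcatalog.data.transfer.HCatReader;
    import org.apache.hcatalog.data.transfer.ReadEntity;
    import org.apache.hcatalog.data.transfer.ReaderContext;

    public class ReadFlowSketch {
        public static void main(String[] args) throws Exception {
            Map<String, String> config = new HashMap<String, String>();
            config.put("table", "studenttab10k");   // assumed table name

            // "Master" half: describe the read and obtain a ReaderContext.
            ReadEntity entity = new ReadEntity.Builder()
                .withTable(config.get("table")).build();
            HCatReader master = DataTransferFactory.getHCatReader(entity, config);
            ReaderContext context = master.prepareRead();

            // "Slave" half: rebuild a reader per split from the shared context.
            for (InputSplit split : context.getSplits()) {
                HCatReader slave = DataTransferFactory.getHCatReader(split, context.getConf());
                Iterator<HCatRecord> records = slave.read();
                while (records.hasNext()) {
                    System.out.println(records.next());
                }
            }
        }
    }

In the real test the ReaderContext is written with ObjectOutputStream by the master and read back by each slave, but the API calls are the same.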

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterMaster.java Mon Sep 10 23:28:55 2012
@@ -39,57 +39,57 @@ import org.apache.hcatalog.data.transfer
 
 public class DataWriterMaster {
 
-	public static void main(String[] args) throws FileNotFoundException, IOException, ClassNotFoundException {
+    public static void main(String[] args) throws FileNotFoundException, IOException, ClassNotFoundException {
 
-		// This config contains all the configuration that master node wants to provide
-		// to the HCatalog.
-		Properties externalConfigs = new Properties();
-		externalConfigs.load(new FileReader(args[0]));
-		Map<String,String> config = new HashMap<String, String>();
-
-		for (Entry<Object, Object> kv : externalConfigs.entrySet()){
-			System.err.println("k: " + kv.getKey() + "\t v: " + kv.getValue());
-			config.put((String)kv.getKey(), (String)kv.getValue());
-		}
-
-		if(args.length == 3 && "commit".equalsIgnoreCase(args[2])){
-			// Then, master commits if everything goes well.
-			ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[1])));
-			WriterContext cntxt = (WriterContext)ois.readObject();
-			commit(config,true, cntxt);		
-			System.exit(0);
-		}
-		// This piece of code runs in master node and gets necessary context.
-		WriterContext cntxt = runsInMaster(config);
-		
-		
-		// Master node will serialize writercontext and will make it available at slaves.
-		File f = new File(args[1]);
-		f.delete();
-		ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(f));
-		oos.writeObject(cntxt);
-		oos.flush();
-		oos.close();
-	}
-
-	private static WriterContext runsInMaster(Map<String, String> config) throws HCatException {
-
-		WriteEntity.Builder builder = new WriteEntity.Builder();
-		WriteEntity entity = builder.withTable(config.get("table")).build();
-		HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
-		WriterContext info = writer.prepareWrite();
-		return info;
-	}
-
-	private static void commit(Map<String, String> config, boolean status, WriterContext cntxt) throws HCatException {
-
-		WriteEntity.Builder builder = new WriteEntity.Builder();
-		WriteEntity entity = builder.withTable(config.get("table")).build();
-		HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
-		if(status){
-			writer.commit(cntxt);			
-		} else {
-			writer.abort(cntxt);
-		}
-	} 
+        // This config contains all the configuration that master node wants to provide
+        // to the HCatalog.
+        Properties externalConfigs = new Properties();
+        externalConfigs.load(new FileReader(args[0]));
+        Map<String, String> config = new HashMap<String, String>();
+
+        for (Entry<Object, Object> kv : externalConfigs.entrySet()) {
+            System.err.println("k: " + kv.getKey() + "\t v: " + kv.getValue());
+            config.put((String) kv.getKey(), (String) kv.getValue());
+        }
+
+        if (args.length == 3 && "commit".equalsIgnoreCase(args[2])) {
+            // Then, master commits if everything goes well.
+            ObjectInputStream ois = new ObjectInputStream(new FileInputStream(new File(args[1])));
+            WriterContext cntxt = (WriterContext) ois.readObject();
+            commit(config, true, cntxt);
+            System.exit(0);
+        }
+        // This piece of code runs in master node and gets necessary context.
+        WriterContext cntxt = runsInMaster(config);
+
+
+        // Master node will serialize writercontext and will make it available at slaves.
+        File f = new File(args[1]);
+        f.delete();
+        ObjectOutputStream oos = new ObjectOutputStream(new FileOutputStream(f));
+        oos.writeObject(cntxt);
+        oos.flush();
+        oos.close();
+    }
+
+    private static WriterContext runsInMaster(Map<String, String> config) throws HCatException {
+
+        WriteEntity.Builder builder = new WriteEntity.Builder();
+        WriteEntity entity = builder.withTable(config.get("table")).build();
+        HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
+        WriterContext info = writer.prepareWrite();
+        return info;
+    }
+
+    private static void commit(Map<String, String> config, boolean status, WriterContext cntxt) throws HCatException {
+
+        WriteEntity.Builder builder = new WriteEntity.Builder();
+        WriteEntity entity = builder.withTable(config.get("table")).build();
+        HCatWriter writer = DataTransferFactory.getHCatWriter(entity, config);
+        if (status) {
+            writer.commit(cntxt);
+        } else {
+            writer.abort(cntxt);
+        }
+    }
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterSlave.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterSlave.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterSlave.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/DataWriterSlave.java Mon Sep 10 23:28:55 2012
@@ -36,51 +36,51 @@ import org.apache.hcatalog.data.transfer
 
 public class DataWriterSlave {
 
-	public static void main(String[] args) throws FileNotFoundException, IOException, ClassNotFoundException {
-		
-		ObjectInputStream ois = new ObjectInputStream(new FileInputStream(args[0]));
-		WriterContext cntxt = (WriterContext) ois.readObject();
-		ois.close();
-		
-		HCatWriter writer = DataTransferFactory.getHCatWriter(cntxt);
-		writer.write(new HCatRecordItr(args[1]));
-		
-	}
-	
-	private static class HCatRecordItr implements Iterator<HCatRecord> {
-
-		BufferedReader reader;
-		String curLine;
-		
-		public HCatRecordItr(String fileName) throws FileNotFoundException {
-			reader = new BufferedReader(new FileReader(new File(fileName)));
-		}
-		
-		@Override
-		public boolean hasNext() {
-			try {
-				curLine = reader.readLine();
-			} catch (IOException e) {
-				e.printStackTrace();
-			}
-			return null == curLine ? false : true;
-		}
-
-		@Override
-		public HCatRecord next() {
-
-			String[] fields = curLine.split("\t");
-			List<Object> data = new ArrayList<Object>(3);
-			data.add(fields[0]);
-			data.add(Integer.parseInt(fields[1]));
-			data.add(Double.parseDouble(fields[2]));
-			return new DefaultHCatRecord(data);
-		}
-
-		@Override
-		public void remove() {
-			// TODO Auto-generated method stub
-			
-		}
-	}
+    public static void main(String[] args) throws FileNotFoundException, IOException, ClassNotFoundException {
+
+        ObjectInputStream ois = new ObjectInputStream(new FileInputStream(args[0]));
+        WriterContext cntxt = (WriterContext) ois.readObject();
+        ois.close();
+
+        HCatWriter writer = DataTransferFactory.getHCatWriter(cntxt);
+        writer.write(new HCatRecordItr(args[1]));
+
+    }
+
+    private static class HCatRecordItr implements Iterator<HCatRecord> {
+
+        BufferedReader reader;
+        String curLine;
+
+        public HCatRecordItr(String fileName) throws FileNotFoundException {
+            reader = new BufferedReader(new FileReader(new File(fileName)));
+        }
+
+        @Override
+        public boolean hasNext() {
+            try {
+                curLine = reader.readLine();
+            } catch (IOException e) {
+                e.printStackTrace();
+            }
+            return null == curLine ? false : true;
+        }
+
+        @Override
+        public HCatRecord next() {
+
+            String[] fields = curLine.split("\t");
+            List<Object> data = new ArrayList<Object>(3);
+            data.add(fields[0]);
+            data.add(Integer.parseInt(fields[1]));
+            data.add(Double.parseDouble(fields[2]));
+            return new DefaultHCatRecord(data);
+        }
+
+        @Override
+        public void remove() {
+            // TODO Auto-generated method stub
+
+        }
+    }
 }
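
The writer-side counterpart runs in two master invocations: DataWriterMaster first prepares the write and serializes a WriterContext, DataWriterSlave reconstructs a writer from that context and streams HCatRecords to it, and DataWriterMaster is then run again with the "commit" argument to finalize (or abort) the output. A condensed single-JVM sketch of that protocol, with an assumed table name and sample values:

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.apache.hcatalog.data.DefaultHCatRecord;
    import org.apache.hcatalog.data.HCatRecord;
    import org.apache.hcatalog.data.transfer.DataTransferFactory;
    import org.apache.hcatalog.data.transfer.HCatWriter;
    import org.apache.hcatalog.data.transfer.WriteEntity;
    import org.apache.hcatalog.data.transfer.WriterContext;

    public class WriteFlowSketch {
        public static void main(String[] args) throws Exception {
            Map<String, String> config = new HashMap<String, String>();
            config.put("table", "numbers_copy");    // assumed table name

            // Master, first invocation: prepare the write and obtain a WriterContext.
            WriteEntity entity = new WriteEntity.Builder()
                .withTable(config.get("table")).build();
            HCatWriter master = DataTransferFactory.getHCatWriter(entity, config);
            WriterContext context = master.prepareWrite();

            // Slave: the writer is rebuilt from the context alone, and write()
            // consumes an Iterator<HCatRecord>. The (String, Integer, Double)
            // field layout mirrors what HCatRecordItr above parses from its
            // tab-delimited input; the values are assumed samples.
            List<HCatRecord> rows = new ArrayList<HCatRecord>();
            List<Object> fields = new ArrayList<Object>(3);
            fields.add("alice");
            fields.add(Integer.valueOf(30));
            fields.add(Double.valueOf(3.9));
            rows.add(new DefaultHCatRecord(fields));
            DataTransferFactory.getHCatWriter(context).write(rows.iterator());

            // Master, second invocation ("commit"): a fresh writer commits the
            // context, or aborts it if the slaves failed.
            HCatWriter committer = DataTransferFactory.getHCatWriter(entity, config);
            committer.commit(context);
        }
    }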

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/GroupByAge.java Mon Sep 10 23:28:55 2012
@@ -45,7 +45,7 @@ import org.apache.hcatalog.mapreduce.Out
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce
  * program to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat
  * jar> The <tab|ctrla> argument controls the output delimiter The hcat jar
  * location should be specified as file://<full path to jar>
@@ -53,40 +53,40 @@ import org.apache.hcatalog.mapreduce.Out
 public class GroupByAge extends Configured implements Tool {
 
     public static class Map extends
-            Mapper<WritableComparable, HCatRecord, IntWritable, IntWritable> {
+        Mapper<WritableComparable, HCatRecord, IntWritable, IntWritable> {
 
         int age;
-        
+
         @Override
         protected void map(
-                WritableComparable key,
-                HCatRecord value,
-                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, IntWritable, IntWritable>.Context context)
-                throws IOException, InterruptedException {
+            WritableComparable key,
+            HCatRecord value,
+            org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, IntWritable, IntWritable>.Context context)
+            throws IOException, InterruptedException {
             age = (Integer) value.get(1);
             context.write(new IntWritable(age), new IntWritable(1));
         }
     }
-    
+
     public static class Reduce extends Reducer<IntWritable, IntWritable,
-    WritableComparable, HCatRecord> {
+        WritableComparable, HCatRecord> {
 
 
-      @Override
-      protected void reduce(IntWritable key, java.lang.Iterable<IntWritable>
-        values, org.apache.hadoop.mapreduce.Reducer<IntWritable,IntWritable,WritableComparable,HCatRecord>.Context context)
-        throws IOException ,InterruptedException {
-          int sum = 0;
-          Iterator<IntWritable> iter = values.iterator();
-          while (iter.hasNext()) {
-              sum++;
-              iter.next();
-          }
-          HCatRecord record = new DefaultHCatRecord(2);
-          record.set(0, key.get());
-          record.set(1, sum);
-          
-          context.write(null, record);
+        @Override
+        protected void reduce(IntWritable key, java.lang.Iterable<IntWritable>
+            values, org.apache.hadoop.mapreduce.Reducer<IntWritable, IntWritable, WritableComparable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            int sum = 0;
+            Iterator<IntWritable> iter = values.iterator();
+            while (iter.hasNext()) {
+                sum++;
+                iter.next();
+            }
+            HCatRecord record = new DefaultHCatRecord(2);
+            record.set(0, key.get());
+            record.set(1, sum);
+
+            context.write(null, record);
         }
     }
 
@@ -100,12 +100,12 @@ public class GroupByAge extends Configur
         String dbName = null;
 
         String principalID = System
-                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+            .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
         if (principalID != null)
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "GroupByAge");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null));
+            inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
@@ -117,10 +117,10 @@ public class GroupByAge extends Configur
         job.setOutputKeyClass(WritableComparable.class);
         job.setOutputValueClass(DefaultHCatRecord.class);
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
-                outputTableName, null));
+            outputTableName, null));
         HCatSchema s = HCatOutputFormat.getTableSchema(job);
         System.err.println("INFO: output schema explicitly set for writing:"
-                + s);
+            + s);
         HCatOutputFormat.setSchema(job, s);
         job.setOutputFormatClass(HCatOutputFormat.class);
         return (job.waitForCompletion(true) ? 0 : 1);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HBaseReadWrite.java Mon Sep 10 23:28:55 2012
@@ -48,7 +48,7 @@ import org.apache.hcatalog.mapreduce.Out
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce
  * program to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat
  * jar> The <tab|ctrla> argument controls the output delimiter The hcat jar
  * location should be specified as file://<full path to jar>
@@ -56,7 +56,7 @@ import org.apache.hcatalog.mapreduce.Out
 public class HBaseReadWrite extends Configured implements Tool {
 
     public static class HBaseWriteMap extends
-            Mapper<LongWritable, Text, Text, Text> {
+        Mapper<LongWritable, Text, Text, Text> {
 
         String name;
         String age;
@@ -64,21 +64,21 @@ public class HBaseReadWrite extends Conf
 
         @Override
         protected void map(
-                LongWritable key,
-                Text value,
-                org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, Text>.Context context)
-                throws IOException, InterruptedException {
+            LongWritable key,
+            Text value,
+            org.apache.hadoop.mapreduce.Mapper<LongWritable, Text, Text, Text>.Context context)
+            throws IOException, InterruptedException {
             String line = value.toString();
             String[] tokens = line.split("\t");
             name = tokens[0];
-            
+
             context.write(new Text(name), value);
         }
     }
-    
+
 
     public static class HBaseWriteReduce extends
-            Reducer<Text, Text, WritableComparable, HCatRecord> {
+        Reducer<Text, Text, WritableComparable, HCatRecord> {
 
         String name;
         String age;
@@ -86,7 +86,7 @@ public class HBaseReadWrite extends Conf
 
         @Override
         protected void reduce(Text key, Iterable<Text> values, Context context)
-                throws IOException, InterruptedException {
+            throws IOException, InterruptedException {
             name = key.toString();
             int count = 0;
             double sum = 0;
@@ -96,21 +96,21 @@ public class HBaseReadWrite extends Conf
                 name = tokens[0];
                 age = tokens[1];
                 gpa = tokens[2];
-                
+
                 count++;
                 sum += Double.parseDouble(gpa.toString());
             }
-            
+
             HCatRecord record = new DefaultHCatRecord(2);
             record.set(0, name);
             record.set(1, Double.toString(sum));
-            
+
             context.write(null, record);
         }
     }
 
     public static class HBaseReadMap extends
-            Mapper<WritableComparable, HCatRecord, Text, Text> {
+        Mapper<WritableComparable, HCatRecord, Text, Text> {
 
         String name;
         String age;
@@ -118,16 +118,16 @@ public class HBaseReadWrite extends Conf
 
         @Override
         protected void map(
-                WritableComparable key,
-                HCatRecord value,
-                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, Text>.Context context)
-                throws IOException, InterruptedException {
+            WritableComparable key,
+            HCatRecord value,
+            org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, Text>.Context context)
+            throws IOException, InterruptedException {
             name = (String) value.get(0);
             gpa = (String) value.get(1);
             context.write(new Text(name), new Text(gpa));
         }
     }
-    
+
 
     public int run(String[] args) throws Exception {
         Configuration conf = getConf();
@@ -140,13 +140,13 @@ public class HBaseReadWrite extends Conf
         String dbName = null;
 
         String principalID = System
-                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+            .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
         if (principalID != null)
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         conf.set("hcat.hbase.output.bulkMode", "false");
         Job job = new Job(conf, "HBaseWrite");
         FileInputFormat.setInputPaths(job, inputDir);
-        
+
         job.setInputFormatClass(TextInputFormat.class);
         job.setOutputFormatClass(HCatOutputFormat.class);
         job.setJarByClass(HBaseReadWrite.class);
@@ -157,16 +157,16 @@ public class HBaseReadWrite extends Conf
         job.setOutputKeyClass(WritableComparable.class);
         job.setOutputValueClass(DefaultHCatRecord.class);
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
-                tableName, null));
-        
+            tableName, null));
+
         boolean succ = job.waitForCompletion(true);
-        
+
         if (!succ) return 1;
-        
+
         job = new Job(conf, "HBaseRead");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName, tableName,
-                null));
-        
+            null));
+
         job.setInputFormatClass(HCatInputFormat.class);
         job.setOutputFormatClass(TextOutputFormat.class);
         job.setJarByClass(HBaseReadWrite.class);
@@ -175,11 +175,11 @@ public class HBaseReadWrite extends Conf
         job.setOutputValueClass(Text.class);
         job.setNumReduceTasks(0);
         TextOutputFormat.setOutputPath(job, new Path(outputDir));
-        
+
         succ = job.waitForCompletion(true);
-        
+
         if (!succ) return 2;
-        
+
         return 0;
     }
 

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTestDriver.java Mon Sep 10 23:28:55 2012
@@ -25,36 +25,35 @@ import org.apache.hadoop.util.ProgramDri
  * human-readable description.
  */
 public class HCatTestDriver {
-  
-  public static void main(String argv[]){
-    int exitCode = -1;
-    ProgramDriver pgd = new ProgramDriver();
-    try {
-      pgd.addClass("typedatacheck", TypeDataCheck.class, 
-                   "A map/reduce program that checks the type of each field and" +
-                   " outputs the entire table (to test hcat).");
-      pgd.addClass("sumnumbers", SumNumbers.class, 
-      "A map/reduce program that performs a group by on the first column and a " +
-      "SUM operation on the other columns of the \"numbers\" table.");
-      pgd.addClass("storenumbers", StoreNumbers.class, "A map/reduce program that " +
-      		"reads from the \"numbers\" table and adds 10 to each fields and writes " +
-      				"to the \"numbers_partitioned\" table into the datestamp=20100101 " +
-      				"partition OR the \"numbers_empty_initially\" table based on a " +
-      				"cmdline arg");
-      pgd.addClass("storecomplex", StoreComplex.class, "A map/reduce program that " +
-              "reads from the \"complex\" table and stores as-is into the " +
-              "\"complex_empty_initially\" table.");
-      pgd.addClass("storedemo", StoreDemo.class, "demo prog.");
-      pgd.driver(argv);
-      
-      // Success
-      exitCode = 0;
+
+    public static void main(String argv[]) {
+        int exitCode = -1;
+        ProgramDriver pgd = new ProgramDriver();
+        try {
+            pgd.addClass("typedatacheck", TypeDataCheck.class,
+                "A map/reduce program that checks the type of each field and" +
+                    " outputs the entire table (to test hcat).");
+            pgd.addClass("sumnumbers", SumNumbers.class,
+                "A map/reduce program that performs a group by on the first column and a " +
+                    "SUM operation on the other columns of the \"numbers\" table.");
+            pgd.addClass("storenumbers", StoreNumbers.class, "A map/reduce program that " +
+                "reads from the \"numbers\" table and adds 10 to each fields and writes " +
+                "to the \"numbers_partitioned\" table into the datestamp=20100101 " +
+                "partition OR the \"numbers_empty_initially\" table based on a " +
+                "cmdline arg");
+            pgd.addClass("storecomplex", StoreComplex.class, "A map/reduce program that " +
+                "reads from the \"complex\" table and stores as-is into the " +
+                "\"complex_empty_initially\" table.");
+            pgd.addClass("storedemo", StoreDemo.class, "demo prog.");
+            pgd.driver(argv);
+
+            // Success
+            exitCode = 0;
+        } catch (Throwable e) {
+            e.printStackTrace();
+        }
+
+        System.exit(exitCode);
     }
-    catch(Throwable e){
-      e.printStackTrace();
-    }
-    
-    System.exit(exitCode);
-  }
 }
 	

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheck.java Mon Sep 10 23:28:55 2012
@@ -33,25 +33,25 @@ import org.apache.pig.impl.util.Utils;
 /**
  * This UDF can be used to check that a tuple presented by HCatLoader has the
  * right types for the fields
- * 
+ *
  * Usage is :
- * 
+ *
  * register testudf.jar;
  * a = load 'numbers' using HCatLoader(...);
  * b = foreach a generate HCatTypeCheck('intnum1000:int,id:int,intnum5:int,intnum100:int,intnum:int,longnum:long,floatnum:float,doublenum:double', *);
  * store b into 'output';
- * 
+ *
  * The schema string (the first argument to the UDF) is of the form one would provide in a 
  * pig load statement.
- * 
+ *
  * The output should only contain the value '1' in all rows. (This UDF returns
  * the integer value 1 if all fields have the right type, else throws IOException)
- *  
+ *
  */
 public class HCatTypeCheck extends EvalFunc<Integer> {
 
     static HashMap<Byte, Class<?>> typeMap = new HashMap<Byte, Class<?>>();
-    
+
     @Override
     public Integer exec(Tuple input) throws IOException {
         String schemaStr = (String) input.get(0);
@@ -61,13 +61,13 @@ public class HCatTypeCheck extends EvalF
         } catch (Exception e) {
             throw new IOException(e);
         }
-        for(int i = 0; i < s.size(); i++) {
-            check(s.getField(i).type, input.get(i+1)); // input.get(i+1) since input.get(0) is the schema;
+        for (int i = 0; i < s.size(); i++) {
+            check(s.getField(i).type, input.get(i + 1)); // input.get(i+1) since input.get(0) is the schema;
         }
         return 1;
     }
-    
-    static {    
+
+    static {
         typeMap.put(DataType.INTEGER, Integer.class);
         typeMap.put(DataType.LONG, Long.class);
         typeMap.put(DataType.FLOAT, Float.class);
@@ -77,34 +77,33 @@ public class HCatTypeCheck extends EvalF
         typeMap.put(DataType.MAP, Map.class);
         typeMap.put(DataType.BAG, DataBag.class);
     }
-    
-    
-    
+
+
     private void die(String expectedType, Object o) throws IOException {
-        throw new IOException("Expected " + expectedType + ", got " +  
-              o.getClass().getName());
+        throw new IOException("Expected " + expectedType + ", got " +
+            o.getClass().getName());
     }
-    
-    
+
+
     private String check(Byte type, Object o) throws IOException {
-        if(o == null) {
+        if (o == null) {
             return "";
         }
-        if(check(typeMap.get(type), o)) {
-            if(type.equals(DataType.MAP)) {
+        if (check(typeMap.get(type), o)) {
+            if (type.equals(DataType.MAP)) {
                 Map<String, String> m = (Map<String, String>) o;
                 check(m);
-            } else if(type.equals(DataType.BAG)) {
+            } else if (type.equals(DataType.BAG)) {
                 DataBag bg = (DataBag) o;
                 for (Tuple tuple : bg) {
                     Map<String, String> m = (Map<String, String>) tuple.get(0);
                     check(m);
                 }
-            } else if(type.equals(DataType.TUPLE)) {
+            } else if (type.equals(DataType.TUPLE)) {
                 Tuple t = (Tuple) o;
-                if(!check(Integer.class, t.get(0)) ||
-                        !check(String.class, t.get(1)) ||
-                                !check(Double.class, t.get(2))) {
+                if (!check(Integer.class, t.get(0)) ||
+                    !check(String.class, t.get(1)) ||
+                    !check(Double.class, t.get(2))) {
                     die("t:tuple(num:int,str:string,dbl:double)", t);
                 }
             }
@@ -113,26 +112,26 @@ public class HCatTypeCheck extends EvalF
         }
         return o.toString();
     }
-    
+
     /**
-    * @param m
-    * @throws IOException 
-    */
+     * @param m
+     * @throws IOException
+     */
     private void check(Map<String, String> m) throws IOException {
-      for(Entry<String, String> e: m.entrySet()) {
-          // just access key and value to ensure they are correct
-          if(!check(String.class, e.getKey())) {
-              die("String", e.getKey());
-          }
-          if(!check(String.class, e.getValue())) {
-              die("String", e.getValue());
-          }
-      }
-      
+        for (Entry<String, String> e : m.entrySet()) {
+            // just access key and value to ensure they are correct
+            if (!check(String.class, e.getKey())) {
+                die("String", e.getKey());
+            }
+            if (!check(String.class, e.getValue())) {
+                die("String", e.getValue());
+            }
+        }
+
     }
-    
+
     private boolean check(Class<?> expected, Object actual) {
-        if(actual == null) {
+        if (actual == null) {
             return true;
         }
         return expected.isAssignableFrom(actual.getClass());
@@ -140,11 +139,11 @@ public class HCatTypeCheck extends EvalF
 
     Schema getSchemaFromString(String schemaString) throws Exception {
         /** ByteArrayInputStream stream = new ByteArrayInputStream(schemaString.getBytes()) ;
-        QueryParser queryParser = new QueryParser(stream) ;
-        Schema schema = queryParser.TupleSchema() ;
-        Schema.setSchemaDefaultType(schema, org.apache.pig.data.DataType.BYTEARRAY);
-        return schema;
-        */
+         QueryParser queryParser = new QueryParser(stream) ;
+         Schema schema = queryParser.TupleSchema() ;
+         Schema.setSchemaDefaultType(schema, org.apache.pig.data.DataType.BYTEARRAY);
+         return schema;
+         */
         return Utils.getSchemaFromString(schemaString);
     }
 

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheckHive.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheckHive.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheckHive.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/HCatTypeCheckHive.java Mon Sep 10 23:28:55 2012
@@ -40,101 +40,102 @@ import org.apache.hadoop.hive.serde2.obj
 
 /**
  * A hive udf to check types of the fields read from hcat. A sample hive query which can use this is:
- * 
+ *
  * create temporary function typecheck as 'org.apache.hcatalog.utils.HCatTypeCheckHive';
  * select typecheck('map<string,string>+struct<num:int,str:string,dbl:double>+array<map<string,string>>+int', 
  * mymap, mytuple, bagofmap, rownum) from complex;
- * 
- * 
+ *
+ *
  * The first argument to the UDF is a string representing the schema of the columns in the table. 
  * The columns in the tables are the remaining args to it.
  * The schema specification consists of the types as given by "describe <table>"
  * with each column's type separated from the next column's type by a '+'
- * 
+ *
  * The UDF will throw an exception (and cause the query to fail) if it does not
  * encounter the correct types.
- * 
+ *
  * The output is a string representation of the data , type and hive category.
  * It is not advisable to use this against large dataset since the output would also
  * be large. 
- * 
+ *
  */
 public final class HCatTypeCheckHive extends GenericUDF {
 
-ObjectInspector[] argOIs;
-@Override
-public Object evaluate(DeferredObject[] args) throws HiveException {
-    List<Object> row = new ArrayList<Object>();
-    String typesStr = (String) getJavaObject(args[0].get(), argOIs[0], new ArrayList<Category>());
-    String[] types = typesStr.split("\\+");
-    for(int i = 0; i < types.length; i++) {
-        types[i] = types[i].toLowerCase();
-    }
-    for(int i = 1; i < args.length; i++) {
-        ObjectInspector oi = argOIs[i];
-        List<ObjectInspector.Category> categories = new ArrayList<ObjectInspector.Category>();
-        Object o = getJavaObject(args[i].get(),oi, categories);
-        try {
-            if(o != null) {
-                Util.check(types[i-1], o);
+    ObjectInspector[] argOIs;
+
+    @Override
+    public Object evaluate(DeferredObject[] args) throws HiveException {
+        List<Object> row = new ArrayList<Object>();
+        String typesStr = (String) getJavaObject(args[0].get(), argOIs[0], new ArrayList<Category>());
+        String[] types = typesStr.split("\\+");
+        for (int i = 0; i < types.length; i++) {
+            types[i] = types[i].toLowerCase();
+        }
+        for (int i = 1; i < args.length; i++) {
+            ObjectInspector oi = argOIs[i];
+            List<ObjectInspector.Category> categories = new ArrayList<ObjectInspector.Category>();
+            Object o = getJavaObject(args[i].get(), oi, categories);
+            try {
+                if (o != null) {
+                    Util.check(types[i - 1], o);
+                }
+            } catch (IOException e) {
+                throw new HiveException(e);
             }
-        } catch (IOException e) {
-            throw new HiveException(e);
+            row.add(o == null ? "null" : o);
+            row.add(":" + (o == null ? "null" : o.getClass()) + ":" + categories);
         }
-        row.add(o == null ? "null" : o);
-        row.add(":" + (o == null ? "null" : o.getClass()) + ":" + categories);
+        return row.toString();
     }
-    return row.toString();
-}
 
-private Object getJavaObject(Object o, ObjectInspector oi, List<Category> categories) {
-    if(categories != null) {
-        categories.add(oi.getCategory());
-    }
-    if(oi.getCategory() == ObjectInspector.Category.LIST) {
-        List<?> l = ((ListObjectInspector)oi).getList(o);
-        List<Object> result = new ArrayList<Object>();
-        ObjectInspector elemOI = ((ListObjectInspector)oi).getListElementObjectInspector();
-        for(Object lo : l) {
-            result.add(getJavaObject(lo, elemOI, categories));    
-        }
-        return result;
-    } else if (oi.getCategory() == ObjectInspector.Category.MAP) {
-        Map<?,?> m = ((MapObjectInspector)oi).getMap(o);
-        Map<String, String> result = new HashMap<String, String>();
-        ObjectInspector koi = ((MapObjectInspector)oi).getMapKeyObjectInspector();
-        ObjectInspector voi = ((MapObjectInspector)oi).getMapValueObjectInspector();
-        for(Entry<?,?> e: m.entrySet()) {
-            result.put((String)getJavaObject(e.getKey(), koi, null), 
-                    (String)getJavaObject(e.getValue(), voi, null));
+    private Object getJavaObject(Object o, ObjectInspector oi, List<Category> categories) {
+        if (categories != null) {
+            categories.add(oi.getCategory());
         }
-        return result;
-        
-    } else if (oi.getCategory() == ObjectInspector.Category.STRUCT) {
-        List<Object> s = ((StructObjectInspector)oi).getStructFieldsDataAsList(o);
-        List<? extends StructField> sf = ((StructObjectInspector)oi).getAllStructFieldRefs();
-        List<Object> result = new ArrayList<Object>();
-        for(int i = 0; i < s.size(); i++) {
-            result.add(getJavaObject(s.get(i), sf.get(i).getFieldObjectInspector(), categories));
+        if (oi.getCategory() == ObjectInspector.Category.LIST) {
+            List<?> l = ((ListObjectInspector) oi).getList(o);
+            List<Object> result = new ArrayList<Object>();
+            ObjectInspector elemOI = ((ListObjectInspector) oi).getListElementObjectInspector();
+            for (Object lo : l) {
+                result.add(getJavaObject(lo, elemOI, categories));
+            }
+            return result;
+        } else if (oi.getCategory() == ObjectInspector.Category.MAP) {
+            Map<?, ?> m = ((MapObjectInspector) oi).getMap(o);
+            Map<String, String> result = new HashMap<String, String>();
+            ObjectInspector koi = ((MapObjectInspector) oi).getMapKeyObjectInspector();
+            ObjectInspector voi = ((MapObjectInspector) oi).getMapValueObjectInspector();
+            for (Entry<?, ?> e : m.entrySet()) {
+                result.put((String) getJavaObject(e.getKey(), koi, null),
+                    (String) getJavaObject(e.getValue(), voi, null));
+            }
+            return result;
+
+        } else if (oi.getCategory() == ObjectInspector.Category.STRUCT) {
+            List<Object> s = ((StructObjectInspector) oi).getStructFieldsDataAsList(o);
+            List<? extends StructField> sf = ((StructObjectInspector) oi).getAllStructFieldRefs();
+            List<Object> result = new ArrayList<Object>();
+            for (int i = 0; i < s.size(); i++) {
+                result.add(getJavaObject(s.get(i), sf.get(i).getFieldObjectInspector(), categories));
+            }
+            return result;
+        } else if (oi.getCategory() == ObjectInspector.Category.PRIMITIVE) {
+            return ((PrimitiveObjectInspector) oi).getPrimitiveJavaObject(o);
         }
-        return result;
-    } else if(oi.getCategory() == ObjectInspector.Category.PRIMITIVE) {
-        return ((PrimitiveObjectInspector)oi).getPrimitiveJavaObject(o);
+        throw new RuntimeException("Unexpected error!");
     }
-    throw new RuntimeException("Unexpected error!");
-}
 
-@Override
-public String getDisplayString(String[] arg0) {
-    return null;
-}
+    @Override
+    public String getDisplayString(String[] arg0) {
+        return null;
+    }
 
-@Override
-public ObjectInspector initialize(ObjectInspector[] argOIs)
+    @Override
+    public ObjectInspector initialize(ObjectInspector[] argOIs)
         throws UDFArgumentException {
-    this.argOIs = argOIs;
-    return ObjectInspectorFactory.getReflectionObjectInspector(String.class, 
+        this.argOIs = argOIs;
+        return ObjectInspectorFactory.getReflectionObjectInspector(String.class,
             ObjectInspectorOptions.JAVA);
-}
+    }
 
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadJson.java Mon Sep 10 23:28:55 2012
@@ -43,69 +43,69 @@ import org.apache.hcatalog.mapreduce.Inp
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce program
  * to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat jar>
-            The <tab|ctrla> argument controls the output delimiter
-            The hcat jar location should be specified as file://<full path to jar>
+ The <tab|ctrla> argument controls the output delimiter
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class ReadJson extends Configured implements Tool {
-    
-  public static class Map
-       extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord>{
-      
-      String s;
-      Integer i;
-      Double d;
-      
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          IntWritable,HCatRecord>.Context context) 
-    throws IOException ,InterruptedException {
-        s = value.get(0)==null?null:(String)value.get(0);
-        i = value.get(1)==null?null:(Integer)value.get(1);
-        d = value.get(2)==null?null:(Double)value.get(2);
-        
-        HCatRecord record = new DefaultHCatRecord(3);
-        record.set(0, s);
-        record.set(1, i);
-        record.set(2, d);
-        
-        context.write(null, record);
 
+    public static class Map
+        extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord> {
+
+        String s;
+        Integer i;
+        Double d;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               IntWritable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            s = value.get(0) == null ? null : (String) value.get(0);
+            i = value.get(1) == null ? null : (Integer) value.get(1);
+            d = value.get(2) == null ? null : (Double) value.get(2);
+
+            HCatRecord record = new DefaultHCatRecord(3);
+            record.set(0, s);
+            record.set(1, i);
+            record.set(2, d);
+
+            context.write(null, record);
+
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String tableName = args[1];
+        String outputDir = args[2];
+        String dbName = null;
+
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "ReadJson");
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setJarByClass(ReadJson.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setOutputValueClass(HCatRecord.class);
+        job.setNumReduceTasks(0);
+        FileOutputFormat.setOutputPath(job, new Path(outputDir));
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new ReadJson(), args);
+        System.exit(exitCode);
     }
-  }
-  
-   public int run(String[] args) throws Exception {
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    String serverUri = args[0];
-    String tableName = args[1];
-    String outputDir = args[2];
-    String dbName = null;
-    
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "ReadJson");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    job.setJarByClass(ReadJson.class);
-    job.setMapperClass(Map.class);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    job.setNumReduceTasks(0);
-    FileOutputFormat.setOutputPath(job, new Path(outputDir));
-    return (job.waitForCompletion(true) ? 0 : 1);
-  }
-   
-   public static void main(String[] args) throws Exception {
-       int exitCode = ToolRunner.run(new ReadJson(), args);
-       System.exit(exitCode);
-   }
 }
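
ReadJson is a plain Tool driven through ToolRunner; its run() expects three positional arguments: server URI, table name, and output directory (ReadRC and ReadText below take the same three). A minimal sketch of a programmatic invocation, with placeholder values:

    import org.apache.hadoop.util.ToolRunner;
    import org.apache.hcatalog.utils.ReadJson;

    public class RunReadJson {
        public static void main(String[] args) throws Exception {
            // Argument order taken from ReadJson.run(); the values are placeholders.
            String[] jobArgs = {"thrift://metastore:9083", "json_table", "/tmp/readjson.out"};
            System.exit(ToolRunner.run(new ReadJson(), jobArgs));
        }
    }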

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadRC.java Mon Sep 10 23:28:55 2012
@@ -43,70 +43,70 @@ import org.apache.hcatalog.mapreduce.Inp
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce program
  * to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat jar>
-            The <tab|ctrla> argument controls the output delimiter
-            The hcat jar location should be specified as file://<full path to jar>
+ The <tab|ctrla> argument controls the output delimiter
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class ReadRC extends Configured implements Tool {
-    
-  public static class Map
-       extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord>{
-      
-      String name;
-      int age;
-      double gpa;
-      
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          IntWritable,HCatRecord>.Context context) 
-    throws IOException ,InterruptedException {
-        name = (String)value.get(0);
-        age = (Integer)value.get(1);
-        gpa = (Double)value.get(2);
-        gpa = Math.floor(gpa) + 0.1;
-        
-        HCatRecord record = new DefaultHCatRecord(3);
-        record.set(0, name);
-        record.set(1, age);
-        record.set(2, gpa);
-        
-        context.write(null, record);
 
+    public static class Map
+        extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord> {
+
+        String name;
+        int age;
+        double gpa;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               IntWritable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            name = (String) value.get(0);
+            age = (Integer) value.get(1);
+            gpa = (Double) value.get(2);
+            gpa = Math.floor(gpa) + 0.1;
+
+            HCatRecord record = new DefaultHCatRecord(3);
+            record.set(0, name);
+            record.set(1, age);
+            record.set(2, gpa);
+
+            context.write(null, record);
+
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String tableName = args[1];
+        String outputDir = args[2];
+        String dbName = null;
+
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "ReadRC");
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setJarByClass(ReadRC.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setOutputValueClass(HCatRecord.class);
+        job.setNumReduceTasks(0);
+        FileOutputFormat.setOutputPath(job, new Path(outputDir));
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new ReadRC(), args);
+        System.exit(exitCode);
     }
-  }
-  
-   public int run(String[] args) throws Exception {
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    String serverUri = args[0];
-    String tableName = args[1];
-    String outputDir = args[2];
-    String dbName = null;
-    
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "ReadRC");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    job.setJarByClass(ReadRC.class);
-    job.setMapperClass(Map.class);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    job.setNumReduceTasks(0);
-    FileOutputFormat.setOutputPath(job, new Path(outputDir));
-    return (job.waitForCompletion(true) ? 0 : 1);
-  }
-   
-   public static void main(String[] args) throws Exception {
-       int exitCode = ToolRunner.run(new ReadRC(), args);
-       System.exit(exitCode);
-   }
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadText.java Mon Sep 10 23:28:55 2012
@@ -43,81 +43,81 @@ import org.apache.hcatalog.mapreduce.Inp
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce program
  * to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat jar>
-            The <tab|ctrla> argument controls the output delimiter
-            The hcat jar location should be specified as file://<full path to jar>
+ The <tab|ctrla> argument controls the output delimiter
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class ReadText extends Configured implements Tool {
-    
-  public static class Map
-       extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord>{
-      
-      byte t;
-      short si;
-      int i;
-      long b;
-      float f;
-      double d;
-      String s;
-      
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          IntWritable,HCatRecord>.Context context) 
-    throws IOException ,InterruptedException {
-        t = (Byte)value.get(0);
-        si = (Short)value.get(1);
-        i = (Integer)value.get(2);
-        b = (Long)value.get(3);
-        f = (Float)value.get(4);
-        d = (Double)value.get(5);
-        s = (String)value.get(6);
-        
-        HCatRecord record = new DefaultHCatRecord(7);
-        record.set(0, t);
-        record.set(1, si);
-        record.set(2, i);
-        record.set(3, b);
-        record.set(4, f);
-        record.set(5, d);
-        record.set(6, s);
-        
-        context.write(null, record);
 
+    public static class Map
+        extends Mapper<WritableComparable, HCatRecord, IntWritable, HCatRecord> {
+
+        byte t;
+        short si;
+        int i;
+        long b;
+        float f;
+        double d;
+        String s;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               IntWritable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            t = (Byte) value.get(0);
+            si = (Short) value.get(1);
+            i = (Integer) value.get(2);
+            b = (Long) value.get(3);
+            f = (Float) value.get(4);
+            d = (Double) value.get(5);
+            s = (String) value.get(6);
+
+            HCatRecord record = new DefaultHCatRecord(7);
+            record.set(0, t);
+            record.set(1, si);
+            record.set(2, i);
+            record.set(3, b);
+            record.set(4, f);
+            record.set(5, d);
+            record.set(6, s);
+
+            context.write(null, record);
+
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String tableName = args[1];
+        String outputDir = args[2];
+        String dbName = null;
+
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "ReadText");
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setJarByClass(ReadText.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setOutputValueClass(HCatRecord.class);
+        job.setNumReduceTasks(0);
+        FileOutputFormat.setOutputPath(job, new Path(outputDir));
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new ReadText(), args);
+        System.exit(exitCode);
     }
-  }
-  
-   public int run(String[] args) throws Exception {
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    String serverUri = args[0];
-    String tableName = args[1];
-    String outputDir = args[2];
-    String dbName = null;
-    
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "ReadText");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    job.setJarByClass(ReadText.class);
-    job.setMapperClass(Map.class);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setOutputValueClass(HCatRecord.class);
-    job.setNumReduceTasks(0);
-    FileOutputFormat.setOutputPath(job, new Path(outputDir));
-    return (job.waitForCompletion(true) ? 0 : 1);
-  }
-   
-   public static void main(String[] args) throws Exception {
-       int exitCode = ToolRunner.run(new ReadText(), args);
-       System.exit(exitCode);
-   }
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/ReadWrite.java Mon Sep 10 23:28:55 2012
@@ -43,7 +43,7 @@ import org.apache.hcatalog.mapreduce.Out
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce
  * program to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat
  * jar> The <tab|ctrla> argument controls the output delimiter The hcat jar
  * location should be specified as file://<full path to jar>
@@ -51,7 +51,7 @@ import org.apache.hcatalog.mapreduce.Out
 public class ReadWrite extends Configured implements Tool {
 
     public static class Map extends
-            Mapper<WritableComparable, HCatRecord, Text, HCatRecord> {
+        Mapper<WritableComparable, HCatRecord, Text, HCatRecord> {
 
         String name;
         int age;
@@ -59,10 +59,10 @@ public class ReadWrite extends Configure
 
         @Override
         protected void map(
-                WritableComparable key,
-                HCatRecord value,
-                org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, HCatRecord>.Context context)
-                throws IOException, InterruptedException {
+            WritableComparable key,
+            HCatRecord value,
+            org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord, Text, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
             name = (String) value.get(0);
             age = (Integer) value.get(1);
             gpa = (Double) value.get(2);
@@ -81,12 +81,12 @@ public class ReadWrite extends Configure
         String dbName = null;
 
         String principalID = System
-                .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+            .getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
         if (principalID != null)
             conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
         Job job = new Job(conf, "ReadWrite");
         HCatInputFormat.setInput(job, InputJobInfo.create(dbName,
-                inputTableName, null));
+            inputTableName, null));
         // initialize HCatOutputFormat
 
         job.setInputFormatClass(HCatInputFormat.class);
@@ -95,10 +95,10 @@ public class ReadWrite extends Configure
         job.setOutputKeyClass(Text.class);
         job.setOutputValueClass(DefaultHCatRecord.class);
         HCatOutputFormat.setOutput(job, OutputJobInfo.create(dbName,
-                outputTableName, null));
+            outputTableName, null));
         HCatSchema s = HCatInputFormat.getTableSchema(job);
         System.err.println("INFO: output schema explicitly set for writing:"
-                + s);
+            + s);
         HCatOutputFormat.setSchema(job, s);
         job.setOutputFormatClass(HCatOutputFormat.class);
         return (job.waitForCompletion(true) ? 0 : 1);

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/SimpleRead.java Mon Sep 10 23:28:55 2012
@@ -43,65 +43,65 @@ import org.apache.hcatalog.mapreduce.Inp
  * table. It performs a group by on the first column and a SUM operation on the
  * other columns. This is to simulate a typical operation in a map reduce program
  * to test that hcat hands the right data to the map reduce program
- * 
+ *
  * Usage: hadoop jar sumnumbers <serveruri> <output dir> <-libjars hive-hcat jar>
-            The <tab|ctrla> argument controls the output delimiter
-            The hcat jar location should be specified as file://<full path to jar>
+ The <tab|ctrla> argument controls the output delimiter
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class SimpleRead extends Configured implements Tool {
 
     private static final String TABLE_NAME = "studenttab10k";
     private static final String TAB = "\t";
-    
-  public static class Map
-       extends Mapper<WritableComparable, HCatRecord, Text, IntWritable>{
-      
-      String name;
-      int age;
-      double gpa;
-      
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          Text,IntWritable>.Context context) 
-    throws IOException ,InterruptedException {
-        name = (String) value.get(0);
-        age = (Integer) value.get(1);
-        gpa = (Double) value.get(2);
-        context.write(new Text(name), new IntWritable(age));
 
+    public static class Map
+        extends Mapper<WritableComparable, HCatRecord, Text, IntWritable> {
+
+        String name;
+        int age;
+        double gpa;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               Text, IntWritable>.Context context)
+            throws IOException, InterruptedException {
+            name = (String) value.get(0);
+            age = (Integer) value.get(1);
+            gpa = (Double) value.get(2);
+            context.write(new Text(name), new IntWritable(age));
+
+        }
+    }
+
+    public int run(String[] args) throws Exception {
+        Configuration conf = getConf();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+
+        String serverUri = args[0];
+        String tableName = args[1];
+        String outputDir = args[2];
+        String dbName = null;
+
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "SimpleRead");
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(TextOutputFormat.class);
+        job.setJarByClass(SimpleRead.class);
+        job.setMapperClass(Map.class);
+        job.setOutputKeyClass(Text.class);
+        job.setOutputValueClass(IntWritable.class);
+        FileOutputFormat.setOutputPath(job, new Path(outputDir));
+        return (job.waitForCompletion(true) ? 0 : 1);
+    }
+
+    public static void main(String[] args) throws Exception {
+        int exitCode = ToolRunner.run(new SimpleRead(), args);
+        System.exit(exitCode);
     }
-  }
-  
-   public int run(String[] args) throws Exception {
-    Configuration conf = getConf();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-
-    String serverUri = args[0];
-    String tableName = args[1];
-    String outputDir = args[2];
-    String dbName = null;
-    
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "SimpleRead");
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(TextOutputFormat.class);
-    job.setJarByClass(SimpleRead.class);
-    job.setMapperClass(Map.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(IntWritable.class);
-    FileOutputFormat.setOutputPath(job, new Path(outputDir));
-    return (job.waitForCompletion(true) ? 0 : 1);
-  }
-   
-   public static void main(String[] args) throws Exception {
-       int exitCode = ToolRunner.run(new SimpleRead(), args);
-       System.exit(exitCode);
-   }
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreComplex.java Mon Sep 10 23:28:55 2012
@@ -43,93 +43,93 @@ import org.apache.hcatalog.mapreduce.Out
  * table and writes to "complex_nopart_empty_initially" table. It reads data from complex which
  * is an unpartitioned table and stores the data as-is into complex_empty_initially table
  * (which is also unpartitioned)
- * 
+ *
  * Usage: hadoop jar testudf.jar storecomplex <serveruri> <-libjars hive-hcat jar>  
-        The hcat jar location should be specified as file://<full path to jar>
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class StoreComplex {
 
     private static final String COMPLEX_TABLE_NAME = "complex";
     private static final String COMPLEX_NOPART_EMPTY_INITIALLY_TABLE_NAME = "complex_nopart_empty_initially";
-    
-    
-  public static class ComplexMapper 
-       extends Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>{
-      
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          WritableComparable,HCatRecord>.Context context) 
-    throws IOException ,InterruptedException {
-        // just write out the value as-is
-        context.write(new IntWritable(0), value);
 
-    }
-  }
-  
-  
-   public static void main(String[] args) throws Exception {
-    Configuration conf = new Configuration();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-    String[] otherArgs = new String[1];
-    int j = 0;
-    for(int i = 0; i < args.length; i++) {
-        if(args[i].equals("-libjars")) {
-            // generic options parser doesn't seem to work!
-            conf.set("tmpjars", args[i+1]);
-            i = i+1; // skip it , the for loop will skip its value                
-        } else {
-            otherArgs[j++] = args[i];
+
+    public static class ComplexMapper
+        extends Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               WritableComparable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            // just write out the value as-is
+            context.write(new IntWritable(0), value);
+
         }
     }
-    if (otherArgs.length != 1) {
-      usage();
-    }
-    String serverUri = otherArgs[0];
-    String tableName = COMPLEX_TABLE_NAME;
-    String dbName = "default";
-    Map<String, String> outputPartitionKvps = new HashMap<String, String>();
-    String outputTableName = null;
-    outputTableName = COMPLEX_NOPART_EMPTY_INITIALLY_TABLE_NAME;
-    // test with null or empty randomly
-    if(new Random().nextInt(2) == 0) {
-        System.err.println("INFO: output partition keys set to null for writing");
-        outputPartitionKvps = null;
-    }
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "storecomplex");
-    // initialize HCatInputFormat
-
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create(
+
+
+    public static void main(String[] args) throws Exception {
+        Configuration conf = new Configuration();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+        String[] otherArgs = new String[1];
+        int j = 0;
+        for (int i = 0; i < args.length; i++) {
+            if (args[i].equals("-libjars")) {
+                // generic options parser doesn't seem to work!
+                conf.set("tmpjars", args[i + 1]);
+                i = i + 1; // skip it , the for loop will skip its value
+            } else {
+                otherArgs[j++] = args[i];
+            }
+        }
+        if (otherArgs.length != 1) {
+            usage();
+        }
+        String serverUri = otherArgs[0];
+        String tableName = COMPLEX_TABLE_NAME;
+        String dbName = "default";
+        Map<String, String> outputPartitionKvps = new HashMap<String, String>();
+        String outputTableName = null;
+        outputTableName = COMPLEX_NOPART_EMPTY_INITIALLY_TABLE_NAME;
+        // test with null or empty randomly
+        if (new Random().nextInt(2) == 0) {
+            System.err.println("INFO: output partition keys set to null for writing");
+            outputPartitionKvps = null;
+        }
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "storecomplex");
+        // initialize HCatInputFormat
+
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(
             dbName, outputTableName, outputPartitionKvps));
-    
-    
-    HCatSchema s = HCatInputFormat.getTableSchema(job);
-    HCatOutputFormat.setSchema(job, s);
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(HCatOutputFormat.class);
-    job.setJarByClass(StoreComplex.class);
-    job.setMapperClass(ComplexMapper.class);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setOutputValueClass(DefaultHCatRecord.class);
-    System.exit(job.waitForCompletion(true) ? 0 : 1);
-  }
+
+
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        HCatOutputFormat.setSchema(job, s);
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        job.setJarByClass(StoreComplex.class);
+        job.setMapperClass(ComplexMapper.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        System.exit(job.waitForCompletion(true) ? 0 : 1);
+    }
 
 
     /**
-     * 
+     *
      */
     private static void usage() {
         System.err.println("Usage: hadoop jar testudf.jar storecomplex <serveruri> <-libjars hive-hcat jar>\n" +
-        "The hcat jar location should be specified as file://<full path to jar>\n");
+            "The hcat jar location should be specified as file://<full path to jar>\n");
         System.exit(2);
-        
+
     }
-   
+
 
 }

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/udfs/java/org/apache/hcatalog/utils/StoreDemo.java Mon Sep 10 23:28:55 2012
@@ -48,104 +48,105 @@ import org.apache.hcatalog.mapreduce.Out
  * 'numbers_nopart_pig_empty_initially' (unpartitioned) table with the tinyint
  * and smallint columns in "numbers" being stored as "int" (since pig cannot handle
  * tinyint and smallint)
- * 
+ *
  * Usage: hadoop jar storenumbers <serveruri> <part|nopart|nopart_pig> <-libjars hive-hcat jar>
-        If the second argument is "part" data is written to datestamp = '2010101' partition of the numbers_part_empty_initially table.
-        If the second argument is "nopart", data is written to the unpartitioned numbers_nopart_empty_initially table.
-        If the second argument is "nopart_pig", data is written to the unpartitioned numbers_nopart_pig_empty_initially table.
-        The hcat jar location should be specified as file://<full path to jar>
+ If the second argument is "part" data is written to datestamp = '2010101' partition of the numbers_part_empty_initially table.
+ If the second argument is "nopart", data is written to the unpartitioned numbers_nopart_empty_initially table.
+ If the second argument is "nopart_pig", data is written to the unpartitioned numbers_nopart_pig_empty_initially table.
+ The hcat jar location should be specified as file://<full path to jar>
  */
 public class StoreDemo {
 
     private static final String NUMBERS_PARTITIONED_TABLE_NAME = "demo_partitioned";
     private static final String NUMBERS_TABLE_NAME = "demo";
-    
-  public static class SumMapper 
-       extends Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord>{
-      
-    
-      Integer intnum;
-     
-      Double doublenum;
-    @Override
-  protected void map(WritableComparable key, HCatRecord value, 
-          org.apache.hadoop.mapreduce.Mapper<WritableComparable,HCatRecord,
-          WritableComparable,HCatRecord>.Context context) 
-    throws IOException ,InterruptedException {
-        intnum = ((Integer)value.get(0));
-        value.set(0, intnum + 20);
-        doublenum = ((Double) value.get(1));
-        value.set(1, (Double) (doublenum + 20));
-        context.write(new IntWritable(0), value);
 
-    }
-  }
-  
-  
-   public static void main(String[] args) throws Exception {
-    Configuration conf = new Configuration();
-    args = new GenericOptionsParser(conf, args).getRemainingArgs();
-    String[] otherArgs = new String[1];
-    int j = 0;
-    for(int i = 0; i < args.length; i++) {
-        if(args[i].equals("-libjars")) {
-            // generic options parser doesn't seem to work!
-            conf.set("tmpjars", args[i+1]);
-            i = i+1; // skip it , the for loop will skip its value                
-        } else {
-            otherArgs[j++] = args[i];
+    public static class SumMapper
+        extends Mapper<WritableComparable, HCatRecord, WritableComparable, HCatRecord> {
+
+
+        Integer intnum;
+
+        Double doublenum;
+
+        @Override
+        protected void map(WritableComparable key, HCatRecord value,
+                           org.apache.hadoop.mapreduce.Mapper<WritableComparable, HCatRecord,
+                               WritableComparable, HCatRecord>.Context context)
+            throws IOException, InterruptedException {
+            intnum = ((Integer) value.get(0));
+            value.set(0, intnum + 20);
+            doublenum = ((Double) value.get(1));
+            value.set(1, (Double) (doublenum + 20));
+            context.write(new IntWritable(0), value);
+
         }
     }
-    if (otherArgs.length != 1) {
-      usage();
-    }
-    String serverUri = otherArgs[0];
-    
-    String tableName = NUMBERS_TABLE_NAME;
-    String dbName = "default";
-    Map<String, String> outputPartitionKvps = new HashMap<String, String>();
-    String outputTableName = NUMBERS_PARTITIONED_TABLE_NAME;
-    outputPartitionKvps.put("datestamp", "20100102");
-    
-    String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
-    if(principalID != null)
-    conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
-    Job job = new Job(conf, "storedemo");
-    // initialize HCatInputFormat
-    HCatInputFormat.setInput(job, InputJobInfo.create(
-    		dbName, tableName, null));
-    // initialize HCatOutputFormat
-    HCatOutputFormat.setOutput(job, OutputJobInfo.create(
+
+
+    public static void main(String[] args) throws Exception {
+        Configuration conf = new Configuration();
+        args = new GenericOptionsParser(conf, args).getRemainingArgs();
+        String[] otherArgs = new String[1];
+        int j = 0;
+        for (int i = 0; i < args.length; i++) {
+            if (args[i].equals("-libjars")) {
+                // generic options parser doesn't seem to work!
+                conf.set("tmpjars", args[i + 1]);
+                i = i + 1; // skip it , the for loop will skip its value
+            } else {
+                otherArgs[j++] = args[i];
+            }
+        }
+        if (otherArgs.length != 1) {
+            usage();
+        }
+        String serverUri = otherArgs[0];
+
+        String tableName = NUMBERS_TABLE_NAME;
+        String dbName = "default";
+        Map<String, String> outputPartitionKvps = new HashMap<String, String>();
+        String outputTableName = NUMBERS_PARTITIONED_TABLE_NAME;
+        outputPartitionKvps.put("datestamp", "20100102");
+
+        String principalID = System.getProperty(HCatConstants.HCAT_METASTORE_PRINCIPAL);
+        if (principalID != null)
+            conf.set(HCatConstants.HCAT_METASTORE_PRINCIPAL, principalID);
+        Job job = new Job(conf, "storedemo");
+        // initialize HCatInputFormat
+        HCatInputFormat.setInput(job, InputJobInfo.create(
+            dbName, tableName, null));
+        // initialize HCatOutputFormat
+        HCatOutputFormat.setOutput(job, OutputJobInfo.create(
             dbName, outputTableName, outputPartitionKvps));
-    // test with and without specifying schema randomly
-    HCatSchema s = HCatInputFormat.getTableSchema(job);
-    System.err.println("INFO: output schema explicitly set for writing:" + s);
-    HCatOutputFormat.setSchema(job, s);
-    
-    job.setInputFormatClass(HCatInputFormat.class);
-    job.setOutputFormatClass(HCatOutputFormat.class);
-    job.setJarByClass(StoreDemo.class);
-    job.setMapperClass(SumMapper.class);
-    job.setOutputKeyClass(IntWritable.class);
-    job.setNumReduceTasks(0);
-    job.setOutputValueClass(DefaultHCatRecord.class);
-    System.exit(job.waitForCompletion(true) ? 0 : 1);
-  }
+        // test with and without specifying schema randomly
+        HCatSchema s = HCatInputFormat.getTableSchema(job);
+        System.err.println("INFO: output schema explicitly set for writing:" + s);
+        HCatOutputFormat.setSchema(job, s);
+
+        job.setInputFormatClass(HCatInputFormat.class);
+        job.setOutputFormatClass(HCatOutputFormat.class);
+        job.setJarByClass(StoreDemo.class);
+        job.setMapperClass(SumMapper.class);
+        job.setOutputKeyClass(IntWritable.class);
+        job.setNumReduceTasks(0);
+        job.setOutputValueClass(DefaultHCatRecord.class);
+        System.exit(job.waitForCompletion(true) ? 0 : 1);
+    }
 
 
     /**
-     * 
+     *
      */
     private static void usage() {
         System.err.println("Usage: hadoop jar storenumbers <serveruri> <part|nopart|nopart_pig> <-libjars hive-hcat jar>\n" +
-                "\tIf the second argument is \"part\" data is written to datestamp = '2010101' partition of " +
-                "the numbers_part_empty_initially table.\n\tIf the second argument is \"nopart\", data is written to " +
-                "the unpartitioned numbers_nopart_empty_initially table.\n\tIf the second argument is \"nopart_pig\", " +
-                "data is written to the unpartitioned numbers_nopart_pig_empty_initially table.\nt" +
-        "The hcat jar location should be specified as file://<full path to jar>\n");
-    System.exit(2);
-        
+            "\tIf the second argument is \"part\" data is written to datestamp = '2010101' partition of " +
+            "the numbers_part_empty_initially table.\n\tIf the second argument is \"nopart\", data is written to " +
+            "the unpartitioned numbers_nopart_empty_initially table.\n\tIf the second argument is \"nopart_pig\", " +
+            "data is written to the unpartitioned numbers_nopart_pig_empty_initially table.\nt" +
+            "The hcat jar location should be specified as file://<full path to jar>\n");
+        System.exit(2);
+
     }
-   
+
 
 }


