pig-commits mailing list archives

From dvrya...@apache.org
Subject svn commit: r1359887 - in /pig/trunk: ./ src/org/apache/pig/ test/org/apache/pig/test/
Date Tue, 10 Jul 2012 20:32:46 GMT
Author: dvryaboy
Date: Tue Jul 10 20:32:46 2012
New Revision: 1359887

URL: http://svn.apache.org/viewvc?rev=1359887&view=rev
Log:
Remove "PIG" exec type

Modified:
    pig/trunk/CHANGES.txt
    pig/trunk/src/org/apache/pig/ExecType.java
    pig/trunk/src/org/apache/pig/Main.java
    pig/trunk/src/org/apache/pig/PigServer.java
    pig/trunk/test/org/apache/pig/test/PigExecTestCase.java
    pig/trunk/test/org/apache/pig/test/TestParser.java
    pig/trunk/test/org/apache/pig/test/TestProjectRange.java

Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Tue Jul 10 20:32:46 2012
@@ -24,6 +24,8 @@ INCOMPATIBLE CHANGES
 
 IMPROVEMENTS
 
+PIG-2804: Remove "PIG" exec type (dvryaboy)
+
 PIG-2726: Handling legitimate NULL values in Cube operator (prasanth_j via dvryaboy)
 
 PIG-2808: Add *.project to .gitignore (azaroth)

Modified: pig/trunk/src/org/apache/pig/ExecType.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/ExecType.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/ExecType.java (original)
+++ pig/trunk/src/org/apache/pig/ExecType.java Tue Jul 10 20:32:46 2012
@@ -31,9 +31,24 @@ public enum ExecType implements Serializ
     /**
      * Use the Hadoop Map/Reduce framework
      */
-    MAPREDUCE,
+    MAPREDUCE;
+
     /**
-     * Use the Experimental Hadoop framework; not available yet.
+     * Given a string, determine the exec type.
+     * @param str accepted values are 'local', 'mapreduce', and 'mapred'
+     * @return exectype as ExecType
      */
-    PIG
+    public static ExecType fromString(String execString) throws PigException {
+        if (execString.equals("mapred")) {
+            return MAPREDUCE;
+        } else {
+            try {
+                return ExecType.valueOf(execString.toUpperCase());
+            } catch (IllegalArgumentException e) {
+                int errCode = 2040;
+                String msg = "Unknown exec type: " + execString;
+                throw new PigException(msg, errCode, e);
+            }
+        }
+    }
 }

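For context, here is a minimal sketch of how a caller might use the new ExecType.fromString helper introduced above. The ExecTypeExample class is illustrative and not part of r1359887; only ExecType, PigException, and the accepted strings ("local", "mapreduce", legacy "mapred") come from the diff.

    import org.apache.pig.ExecType;
    import org.apache.pig.PigException;

    public class ExecTypeExample {
        public static void main(String[] args) {
            try {
                // Legacy alias still resolves to the MAPREDUCE exec type.
                System.out.println(ExecType.fromString("mapred"));   // MAPREDUCE
                System.out.println(ExecType.fromString("local"));    // LOCAL
                // "pig" is no longer a valid exec type after this commit.
                System.out.println(ExecType.fromString("pig"));
            } catch (PigException e) {
                // Error code 2040, message "Unknown exec type: pig".
                System.err.println(e.getMessage());
            }
        }
    }
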
Modified: pig/trunk/src/org/apache/pig/Main.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/Main.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/Main.java (original)
+++ pig/trunk/src/org/apache/pig/Main.java Tue Jul 10 20:32:46 2012
@@ -213,7 +213,7 @@ static int run(String args[], PigProgres
         ExecType execType = ExecType.MAPREDUCE ;
         String execTypeString = properties.getProperty("exectype");
         if(execTypeString!=null && execTypeString.length()>0){
-            execType = PigServer.parseExecType(execTypeString);
+            execType = ExecType.fromString(execTypeString);
         }
 
         // set up client side system properties in UDF context
@@ -327,7 +327,7 @@ static int run(String args[], PigProgres
 
             case 'x':
                 try {
-                    execType = PigServer.parseExecType(opts.getValStr());
+                    execType = ExecType.fromString(opts.getValStr());
                     } catch (IOException e) {
                         throw new RuntimeException("ERROR: Unrecognized exectype.", e);
                     }
@@ -537,7 +537,7 @@ static int run(String args[], PigProgres
             return ReturnCode.SUCCESS;
         } else {
             pigContext.getProperties().setProperty(PigContext.PIG_CMD_ARGS_REMAINDERS, ObjectSerializer.serialize(remainders));
-            
+
             // They have a pig script they want us to run.
             mode = ExecMode.FILE;
 

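The Main.java hunks above only swap the parser call; the resolution order is unchanged. A condensed, hypothetical sketch of that order follows (the "exectype" property name and the MAPREDUCE default are taken from the diff; the ExecTypeResolution class and resolve method are illustrative):

    import java.util.Properties;
    import org.apache.pig.ExecType;
    import org.apache.pig.PigException;

    class ExecTypeResolution {
        // Illustrative helper, not part of the commit: mirrors the order
        // visible in Main.run() above.
        //   1. default to MAPREDUCE
        //   2. override from the "exectype" property, if set
        //   3. override again from the -x command-line value, if given
        static ExecType resolve(Properties properties, String xOptionValue) throws PigException {
            ExecType execType = ExecType.MAPREDUCE;
            String execTypeString = properties.getProperty("exectype");
            if (execTypeString != null && execTypeString.length() > 0) {
                execType = ExecType.fromString(execTypeString);
            }
            if (xOptionValue != null) {
                execType = ExecType.fromString(xOptionValue);
            }
            return execType;
        }
    }
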
Modified: pig/trunk/src/org/apache/pig/PigServer.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/PigServer.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/PigServer.java (original)
+++ pig/trunk/src/org/apache/pig/PigServer.java Tue Jul 10 20:32:46 2012
@@ -124,35 +124,6 @@ public class PigServer {
 
     public static final String PRETTY_PRINT_SCHEMA_PROPERTY = "pig.pretty.print.schema";
 
-    /**
-     * Given a string, determine the exec type.
-     * @param str accepted values are 'local', 'mapreduce', and 'mapred'
-     * @return exectype as ExecType
-     */
-    public static ExecType parseExecType(String str) throws IOException {
-        String normStr = str.toLowerCase();
-
-        if (normStr.equals("local")) {
-            return ExecType.LOCAL;
-        }
-        if (normStr.equals("mapreduce")) {
-            return ExecType.MAPREDUCE;
-        }
-        if (normStr.equals("mapred")) {
-            return ExecType.MAPREDUCE;
-        }
-        if (normStr.equals("pig")) {
-            return ExecType.PIG;
-        }
-        if (normStr.equals("pigbody")) {
-            return ExecType.PIG;
-        }
-
-        int errCode = 2040;
-        String msg = "Unknown exec type: " + str;
-        throw new PigException(msg, errCode, PigException.BUG);
-    }
-
     /*
      * The data structure to support grunt shell operations.
      * The grunt shell can only work on one graph at a time.
@@ -167,15 +138,15 @@ public class PigServer {
      * The current Graph the grunt shell is working on.
      */
     private Graph currDAG;
-    
+
     protected final PigContext pigContext;
-    
+
     private String jobName;
 
     private String jobPriority;
 
     private final static AtomicInteger scopeCounter = new AtomicInteger(0);
-    
+
     protected final String scope = constructScope();
 
 
@@ -206,7 +177,7 @@ public class PigServer {
      * @throws IOException
      */
     public PigServer(String execTypeString) throws ExecException, IOException {
-        this(parseExecType(execTypeString));
+        this(ExecType.fromString(execTypeString));
     }
 
     /**
@@ -233,7 +204,7 @@ public class PigServer {
 
         aggregateWarning = "true".equalsIgnoreCase(pigContext.getProperties().getProperty("aggregate.warning"));
         isMultiQuery = "true".equalsIgnoreCase(pigContext.getProperties().getProperty("opt.multiquery","true"));
-        
+
         jobName = pigContext.getProperties().getProperty(
                 PigContext.JOB_NAME,
                 PigContext.JOB_NAME_PREFIX + ":DefaultJobName");
@@ -339,11 +310,11 @@ public class PigServer {
      * Submits a batch of Pig commands for execution.
      *
      * @return list of jobs being executed
-     * @throws IOException 
+     * @throws IOException
      */
     public List<ExecJob> executeBatch() throws IOException {
         PigStats stats = null;
-        
+
         if( !isMultiQuery ) {
             // ignore if multiquery is off
             stats = PigStats.get();
@@ -360,7 +331,7 @@ public class PigServer {
 
         return getJobs(stats);
     }
-    
+
     /**
      * Retrieves a list of Job objects from the PigStats object
      * @param stats
@@ -459,15 +430,15 @@ public class PigServer {
 
         return resourceLocation;
     }
-    
+
     /**
-     * Registers a jar file. Name of the jar file can be an absolute or 
+     * Registers a jar file. Name of the jar file can be an absolute or
      * relative path.
-     * 
+     *
      * If multiple resources are found with the specified name, the
      * first one is registered as returned by getSystemResources.
      * A warning is issued to inform the user.
-     * 
+     *
      * @param name of the jar file to register
      * @throws IOException
      */
@@ -518,11 +489,11 @@ public class PigServer {
         String cwd = new File(".").getCanonicalPath();
         String filePath = f.getCanonicalPath();
         //Use the relative path in the jar, if the path specified is relative
-        String nameInJar = filePath.equals(cwd + File.separator + path) ? 
+        String nameInJar = filePath.equals(cwd + File.separator + path) ?
                 filePath.substring(cwd.length() + 1) : filePath;
         pigContext.addScriptFile(nameInJar, filePath);
         if(scriptingLang != null) {
-            ScriptEngine se = ScriptEngine.getInstance(scriptingLang);    
+            ScriptEngine se = ScriptEngine.getInstance(scriptingLang);
             se.registerFunctions(nameInJar, namespace, pigContext);
         }
     }
@@ -745,7 +716,7 @@ public class PigServer {
         try {
             LogicalRelationalOperator op = getOperatorForAlias( alias );
             LogicalSchema schema = op.getSchema();
-            
+
             boolean pretty = "true".equals(pigContext.getProperties()
                                    .getProperty(PRETTY_PRINT_SCHEMA_PROPERTY));
 
@@ -767,7 +738,7 @@ public class PigServer {
     /**
      * Write the schema for a nestedAlias to System.out. Denoted by
      * alias::nestedAlias.
-     * 
+     *
      * @param alias Alias whose schema has nestedAlias
      * @param nestedAlias Alias whose schema will be written out
      * @return Schema of alias dumped
@@ -1010,7 +981,7 @@ public class PigServer {
             currDAG.lp.explain(lps, format, verbose);
 
             pp.explain(pps, format, verbose);
-            
+
             MapRedUtil.checkLeafIsStore(pp, pigContext);
             MapReduceLauncher launcher = new MapReduceLauncher();
             launcher.explain(pp, pigContext, eps, format, verbose);
@@ -1145,7 +1116,7 @@ public class PigServer {
 
     /**
      * Return a map containing the logical plan associated with each alias.
-     * 
+     *
      * @return map
      */
     public Map<String, LogicalPlan> getAliases() {
@@ -1199,7 +1170,7 @@ public class PigServer {
             //the files being loaded in load don't exist anymore.
             e.printStackTrace();
         }
-        
+
         ExampleGenerator exgen = new ExampleGenerator( currDAG.lp, pigContext );
         try {
             return exgen.getExamples();
@@ -1210,22 +1181,22 @@ public class PigServer {
             e.printStackTrace(System.out);
             throw new IOException("Exception ", e);
         }
-     
+
     }
-    
+
     public void printHistory(boolean withNumbers) {
-    	
+
     	List<String> sc = currDAG.getScriptCache();
-    	
+
     	if(!sc.isEmpty()) {
     		for(int i = 0 ; i < sc.size(); i++) {
     			if(withNumbers) System.out.print((i+1)+"   ");
     			System.out.println(sc.get(i));
-    		}    		
+    		}
     	}
-    	
+
     }
-    
+
     private void buildStorePlan(String alias) throws IOException {
         currDAG.parseQuery();
         currDAG.buildPlan( alias );
@@ -1233,7 +1204,7 @@ public class PigServer {
         if( !isBatchOn() || alias != null ) {
             // MRCompiler needs a store to be the leaf - hence
             // add a store to the plan to explain
-            QueryParserUtils.attachStorePlan(scope, currDAG.lp, "fakefile", null, currDAG.getOperator( alias ), 
+            QueryParserUtils.attachStorePlan(scope, currDAG.lp, "fakefile", null, currDAG.getOperator( alias ),
                     "fake", pigContext );
         }
         currDAG.compile();
@@ -1252,17 +1223,17 @@ public class PigServer {
 
         // In this plan, all stores in the plan will be executed. They should be ignored if the plan is reused.
         currDAG.countExecutedStores();
-       
+
         currDAG.compile();
 
         if( currDAG.lp.size() == 0 ) {
-           return PigStats.get(); 
+           return PigStats.get();
         }
 
         pigContext.getProperties().setProperty("pig.logical.plan.signature", currDAG.lp.getSignature());
 
         PigStats stats = executeCompiledLogicalPlan();
-        
+
         return stats;
     }
 
@@ -1270,7 +1241,7 @@ public class PigServer {
         // discover pig features used in this script
         ScriptState.get().setScriptFeatures( currDAG.lp );
         PhysicalPlan pp = compilePp();
-       
+
         return launchPlan(pp, "job_pigexec_");
     }
 
@@ -1302,7 +1273,7 @@ public class PigServer {
         } finally {
             launcher.reset();
         }
-        
+
         for (OutputStats output : stats.getOutputStats()) {
             if (!output.isSuccessful()) {
                 POStore store = output.getPOStore();
@@ -1315,7 +1286,7 @@ public class PigServer {
                 }
             }
         }
-        
+
         return stats;
     }
 
@@ -1369,7 +1340,7 @@ public class PigServer {
         private int processedStores = 0;
 
         private LogicalPlan lp;
-        
+
         private int currentLineNum = 0;
 
         public Graph(boolean batchMode) {
@@ -1412,7 +1383,7 @@ public class PigServer {
         }
 
         /**
-         * Get the operator with the given alias in the raw plan. Null if not 
+         * Get the operator with the given alias in the raw plan. Null if not
          * found.
          */
         Operator getOperator(String alias) throws FrontendException {
@@ -1438,12 +1409,12 @@ public class PigServer {
         /**
          * Build a plan for the given alias. Extra branches and child branch under alias
          * will be ignored. Dependent branch (i.e. scalar) will be kept.
-         * @throws IOException 
+         * @throws IOException
          */
         void buildPlan(String alias) throws IOException {
             if( alias == null )
                 skipStores();
-            
+
             final Queue<Operator> queue = new LinkedList<Operator>();
             if( alias != null ) {
                 Operator op = getOperator( alias );
@@ -1463,11 +1434,11 @@ public class PigServer {
             }
 
             LogicalPlan plan = new LogicalPlan();
-            
+
             while( !queue.isEmpty() ) {
                 Operator currOp = queue.poll();
                 plan.add( currOp );
-                
+
                 List<Operator> preds = lp.getPredecessors( currOp );
                 if( preds != null ) {
                     List<Operator> ops = new ArrayList<Operator>( preds );
@@ -1477,7 +1448,7 @@ public class PigServer {
                         plan.connect( pred, currOp );
                     }
                 }
-                
+
                 // visit expression associated with currOp. If it refers to any other operator
                 // that operator is also going to be enqueued.
                 currOp.accept( new AllExpressionVisitor( plan, new DependencyOrderWalker( plan ) ) {
@@ -1490,17 +1461,17 @@ public class PigServer {
                                     Operator refOp = expr.getImplicitReferencedOperator();
                                     if( !queue.contains( refOp ) )
                                         queue.add( refOp );
-                                }                                
+                                }
                             };
                         }
                     }
                 );
-                
+
                 currOp.setPlan( plan );
             }
             lp = plan;
         }
-        
+
         /**
          *  Remove stores that have been executed previously from the overall plan.
          */
@@ -1518,14 +1489,14 @@ public class PigServer {
                     }
                 }
             }
-            
+
             for( Operator op : sinksToRemove ) {
                 Operator pred = lp.getPredecessors( op ).get(0);
                 lp.disconnect( pred, op );
                 lp.remove( op );
             }
         }
-        
+
         /**
          * Accumulate the given statement to previous query statements and generate
          * an overall (raw) plan.
@@ -1552,12 +1523,12 @@ public class PigServer {
             } else {
                 scriptCache.add( query );
             }
-           
+
             if(validateEachStatement){
                 validateQuery();
             }
             parseQuery();
-            
+
             if( !batchMode ) {
                 buildPlan( null );
                 for( Operator sink : lp.getSinks() ) {
@@ -1576,7 +1547,7 @@ public class PigServer {
                 }
             }
         }
-        
+
         void validateQuery() throws FrontendException {
             String query = buildQuery();
             QueryParserDriver parserDriver = new QueryParserDriver( pigContext, scope, fileNameMap );
@@ -1588,7 +1559,7 @@ public class PigServer {
                 throw ex;
             }
         }
-        
+
         public List<String> getScriptCache() {
         	return scriptCache;
         }
@@ -1626,25 +1597,25 @@ public class PigServer {
             for( String line : scriptCache ) {
                 accuQuery.append( line + "\n" );
             }
-            
+
             return accuQuery.toString();
         }
-        
+
         private void compile() throws IOException {
             compile( lp );
             currDAG.postProcess();
         }
-        
+
         private void compile(LogicalPlan lp) throws FrontendException  {
             new ColumnAliasConversionVisitor(lp).visit();
             new SchemaAliasVisitor(lp).visit();
             new ScalarVisitor(lp, pigContext, scope).visit();
-            
+
             // TODO: move optimizer here from HExecuteEngine.
             // TODO: input/output validation visitor
 
             CompilationMessageCollector collector = new CompilationMessageCollector() ;
-            
+
             new TypeCheckingRelVisitor( lp, collector).visit();
             if(aggregateWarning) {
                 CompilationMessageCollector.logMessages(collector, MessageType.Warning, aggregateWarning, log);
@@ -1653,7 +1624,7 @@ public class PigServer {
                     CompilationMessageCollector.logAllMessages(collector, log);
                 }
             }
-            
+
             new UnionOnSchemaSetter( lp ).visit();
             new CastLineageSetter(lp, collector).visit();
             new ScalarVariableValidator(lp).visit();
@@ -1670,7 +1641,7 @@ public class PigServer {
             // the load/store func is not reversible (or they are
             // different functions), we connect the store and the load
             // to remember the dependency.
-            
+
             Set<LOLoad> loadOps = new HashSet<LOLoad>();
             List<Operator> sources = lp.getSources();
             for (Operator source : sources) {
@@ -1678,7 +1649,7 @@ public class PigServer {
                     loadOps.add((LOLoad)source);
                 }
             }
-            
+
             Set<LOStore> storeOps = new HashSet<LOStore>();
             List<Operator> sinks = lp.getSinks();
             for (Operator sink : sinks) {
@@ -1687,7 +1658,7 @@ public class PigServer {
                 }
             }
 
-            
+
             for (LOLoad load : loadOps) {
                 for (LOStore store : storeOps) {
                     String ifile = load.getFileSpec().getFileName();
@@ -1705,13 +1676,13 @@ public class PigServer {
                 }
             }
         }
-        
+
 
         protected Graph duplicate() {
             // There are two choices on how we duplicate the logical plan
             // 1 - we really clone each operator and connect up the cloned operators
             // 2 - we cache away the script till the point we need to clone
-            // and then simply re-parse the script. 
+            // and then simply re-parse the script.
             // The latter approach is used here
             // FIXME: There is one open issue with this now:
             // Consider the following script:
@@ -1729,7 +1700,7 @@ public class PigServer {
             // parse each line of the cached script
             int lineNumber = 1;
 
-            // create data structures needed for parsing        
+            // create data structures needed for parsing
             Graph graph = new Graph(isBatchOn());
             graph.processedStores = processedStores;
             graph.fileNameMap = new HashMap<String, String>(fileNameMap);
@@ -1737,7 +1708,7 @@ public class PigServer {
             try {
                 for (Iterator<String> it = scriptCache.iterator(); it.hasNext(); lineNumber++) {
                     // always doing registerQuery irrespective of the batch mode
-                    // TODO: Need to figure out if anything different needs to happen if batch 
+                    // TODO: Need to figure out if anything different needs to happen if batch
                     // mode is not on
                     // Don't have to do the validation again, so set validateEachStatement param to false
                     graph.registerQuery(it.next(), lineNumber, false);
@@ -1753,7 +1724,7 @@ public class PigServer {
 
     /**
      * This can be called to indicate if the query is being parsed/compiled
-     * in a mode that expects each statement to be validated as it is 
+     * in a mode that expects each statement to be validated as it is
      * entered, instead of just doing it once for whole script.
      * @param validateEachStatement
      */

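For users of the public PigServer(String) constructor touched above, nothing changes at the call site; only the internal parsing moved from PigServer.parseExecType to ExecType.fromString. A small, hypothetical usage sketch (the LocalPigExample class and 'input.txt' path are placeholders):

    import java.io.IOException;
    import org.apache.pig.ExecException;
    import org.apache.pig.PigServer;

    public class LocalPigExample {
        public static void main(String[] args) throws ExecException, IOException {
            // "local", "mapreduce", and the legacy alias "mapred" are accepted;
            // "pig" now fails with "Unknown exec type".
            PigServer pig = new PigServer("local");
            pig.registerQuery("a = load 'input.txt';");
            pig.shutdown();
        }
    }
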
Modified: pig/trunk/test/org/apache/pig/test/PigExecTestCase.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/PigExecTestCase.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/PigExecTestCase.java (original)
+++ pig/trunk/test/org/apache/pig/test/PigExecTestCase.java Tue Jul 10 20:32:46 2012
@@ -29,25 +29,25 @@ import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4; 
+import org.junit.runners.JUnit4;
 
-@RunWith(JUnit4.class)   
+@RunWith(JUnit4.class)
 public abstract class PigExecTestCase extends TestCase {
 
     protected final Log log = LogFactory.getLog(getClass());
-    
+
     protected ExecType execType = LOCAL;
-    
+
     static MiniCluster cluster;
     protected PigServer pigServer;
-    
+
     @Before
     @Override
     public void setUp() throws Exception {
-        
+
         String execTypeString = System.getProperty("test.exectype");
         if(execTypeString!=null && execTypeString.length()>0){
-            execType = PigServer.parseExecType(execTypeString);
+            execType = ExecType.fromString(execTypeString);
         }
         if(execType == MAPREDUCE) {
             cluster = MiniCluster.buildCluster();
@@ -62,7 +62,7 @@ public abstract class PigExecTestCase ex
     public void tearDown() throws Exception {
         pigServer.shutdown();
     }
-    
+
     @AfterClass
     public static void oneTimeTearDown() throws Exception {
         if(cluster != null)

Modified: pig/trunk/test/org/apache/pig/test/TestParser.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestParser.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestParser.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestParser.java Tue Jul 10 20:32:46 2012
@@ -46,19 +46,19 @@ import org.junit.runners.JUnit4;
 public class TestParser extends TestCase {
 
 protected final Log log = LogFactory.getLog(getClass());
-    
+
     protected ExecType execType = MAPREDUCE;
-    
+
     private static MiniCluster cluster;
     protected PigServer pigServer;
-    
+
     @Before
     @Override
     public void setUp() throws Exception {
-        
+
         String execTypeString = System.getProperty("test.exectype");
         if(execTypeString!=null && execTypeString.length()>0){
-            execType = PigServer.parseExecType(execTypeString);
+            execType = ExecType.fromString(execTypeString);
         }
         if(execType == MAPREDUCE) {
             cluster = MiniCluster.buildCluster();
@@ -68,6 +68,7 @@ protected final Log log = LogFactory.get
         }
     }
 
+    @Override
     @After
     public void tearDown() throws Exception {
         pigServer.shutdown();
@@ -78,7 +79,7 @@ protected final Log log = LogFactory.get
         if(cluster != null)
             cluster.shutDown();
     }
-    
+
     @Test
     public void testLoadingNonexistentFile() throws ExecException, IOException {
         try {
@@ -91,39 +92,39 @@ protected final Log log = LogFactory.get
         } catch (IOException io) {
         }
     }
-    
+
     @Test
     public void testRemoteServerList() throws ExecException, IOException {
         try {
             Properties pigProperties = pigServer.getPigContext().getProperties();
             pigProperties.setProperty("fs.default.name", "hdfs://a.com:8020");
             Configuration conf;
-            
+
             pigServer.registerQuery("a = load '/user/pig/1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(conf.get("mapreduce.job.hdfs-servers")==null||
                     conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
-            
+
             pigServer.registerQuery("a = load 'hdfs://a.com/user/pig/1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers")==null||
                     conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
-            
+
             pigServer.registerQuery("a = load 'har:///1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(pigProperties.getProperty("mapreduce.job.hdfs-servers")==null||
                     conf.get("mapreduce.job.hdfs-servers").equals("hdfs://a.com:8020"));
-            
+
             pigServer.registerQuery("a = load 'hdfs://b.com/user/pig/1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(conf.get("mapreduce.job.hdfs-servers")!=null &&
                     conf.get("mapreduce.job.hdfs-servers").contains("hdfs://b.com"));
-            
+
             pigServer.registerQuery("a = load 'har://hdfs-c.com/user/pig/1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(conf.get("mapreduce.job.hdfs-servers")!=null &&
                     conf.get("mapreduce.job.hdfs-servers").contains("hdfs://c.com"));
-            
+
             pigServer.registerQuery("a = load 'hdfs://d.com:8020/user/pig/1.txt';");
             conf = ConfigurationUtil.toConfiguration(pigProperties);
             assertTrue(conf.get("mapreduce.job.hdfs-servers")!=null &&
@@ -133,7 +134,7 @@ protected final Log log = LogFactory.get
         } catch (IOException io) {
         }
     }
-    
+
     @Test
     public void testRemoteServerList2() throws ExecException, IOException {
 
@@ -145,7 +146,7 @@ protected final Log log = LogFactory.get
 
         Data data = Storage.resetData(pigServer.getPigContext());
         data.set("/user/pig/1.txt");// no data
-        
+
         pigServer.registerQuery("a = load '/user/pig/1.txt' using mock.Storage;");
         pigServer.registerQuery("store a into '/user/pig/1.txt';");
 

Modified: pig/trunk/test/org/apache/pig/test/TestProjectRange.java
URL: http://svn.apache.org/viewvc/pig/trunk/test/org/apache/pig/test/TestProjectRange.java?rev=1359887&r1=1359886&r2=1359887&view=diff
==============================================================================
--- pig/trunk/test/org/apache/pig/test/TestProjectRange.java (original)
+++ pig/trunk/test/org/apache/pig/test/TestProjectRange.java Tue Jul 10 20:32:46 2012
@@ -23,9 +23,7 @@ import static org.apache.pig.ExecType.MA
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
-import java.io.FileWriter;
 import java.io.IOException;
-import java.io.PrintWriter;
 import java.util.Iterator;
 import java.util.List;
 
@@ -78,7 +76,7 @@ public class TestProjectRange  {
         String execTypeString = System.getProperty("test.exectype");
 
         if(execTypeString!=null && execTypeString.length()>0){
-            execType = PigServer.parseExecType(execTypeString);
+            execType = ExecType.fromString(execTypeString);
         }
 
         String[] input = {"10\t20\t30\t40\t50", "11\t21\t31\t41\t51"};
@@ -122,59 +120,59 @@ public class TestProjectRange  {
     public void testFullRangeForeachWSchema() throws IOException, ParserException {
 
         String query;
-        
-        //specifying the new aliases 
+
+        //specifying the new aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate a .. c as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
 
         //specifying the new aliases - refer to column by pos
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate $0 .. $2 as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
 
         //column with pos , name
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate $0 .. c as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
-        
-        
-        //specifying the new aliases 
+
+
+        //specifying the new aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate b .. d as (bb, cc, dd);"
-            ; 
+            ;
         compileAndCompareSchema("bb : float, cc : int, dd : int", query, "f");
 
         //begin, end of range is same
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate b .. b as (bb), $2 .. $2;"
-            ; 
-        compileAndCompareSchema("bb : float, c : int", query, "f");        
-        
+            ;
+        compileAndCompareSchema("bb : float, c : int", query, "f");
+
         // without aliases - two projections
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : int, c : int, d : int, e : int);"
             + "f = foreach l1 generate a .. c, d .. e ;"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : int, c : int, d : int, e : int", query, "f");
-              
+
         // without aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : int, c : int, d : int, e : int);"
             + "f = foreach l1 generate a .. c ;"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : int, c : int", query, "f");
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30)",
@@ -189,7 +187,7 @@ public class TestProjectRange  {
     throws IOException, ParserException {
 
         Schema expectedSch = null;
-        
+
         if(expectedSchStr != null)
             expectedSch = Utils.getSchemaFromString(expectedSchStr);
 
@@ -213,44 +211,44 @@ public class TestProjectRange  {
     @Test
     public void testEndRangeForeachWSchema() throws IOException, ParserException {
 
-        //specifying the new aliases 
+        //specifying the new aliases
         String query;
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  .. c as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
 
         //col position
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  .. $2 as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
 
-        //end is the beginning! 
+        //end is the beginning!
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  .. $0 as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int", query, "f");
-        
-        
+
+
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  .. c as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : float, cc : int", query, "f");
-        
+
         // without aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : int, c : int, d : long, e : int);"
             + "f = foreach l1 generate  .. $3 ;"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : int, c : int, d : long", query, "f");
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30,40L)",
@@ -269,12 +267,12 @@ public class TestProjectRange  {
     @Test
     public void testStartRangeForeachWSchema() throws IOException, ParserException {
 
-        //specifying the new aliases 
+        //specifying the new aliases
         String query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' " +
             		"as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  c ..  as (aa, bb, cc);"
-            ; 
+            ;
         compileAndCompareSchema("aa : int, bb : int, cc : int", query, "f");
 
         // without aliases
@@ -282,43 +280,43 @@ public class TestProjectRange  {
             "  l1 = load '" + INP_FILE_5FIELDS + "' " +
                         "as (a : int, b : int, c : int, d : long, e : int);"
             + "f = foreach l1 generate  $1 ..  ;"
-            ; 
+            ;
         compileAndCompareSchema("b : int, c : int, d : long, e : int", query, "f");
-        
-        //start with last column - beginning is the end! 
+
+        //start with last column - beginning is the end!
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' " +
                         "as (a : int, b : int, c : int, d : long, e : int);"
             + "f = foreach l1 generate  e ..  ;"
-            ; 
+            ;
         compileAndCompareSchema("e : int", query, "f");
-        
+
         //specifying the new aliases for one
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' " +
             		"as (a : int, b : long, c : int, d : int, e : int);"
             + "f = foreach l1 generate  c ..  as (aa, bb, cc), b .. ;"
-            ; 
+            ;
         compileAndCompareSchema(
                 "aa : int, bb : int, cc : int, b : long, c : int, d : int, e : int",
                 query,
                 "f"
-        );        
+        );
 
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(30,40,50,20L,30,40,50)",
                             "(31,41,51,21L,31,41,51)",
-                            
+
                     });
         Util.checkQueryOutputsAfterSort(it, expectedRes);
 
     }
-    
-    
+
+
     /**
      * Test multiple different types of range-project with foreach
      * @throws IOException
@@ -327,24 +325,24 @@ public class TestProjectRange  {
    @Test
     public void testMixRangeForeachWSchema() throws IOException, ParserException {
 
-        //specifying the new aliases 
+        //specifying the new aliases
         String query;
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  .. b, c .. d, d .. as (aa, bb);"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : float, c : int, d : int, aa : int, bb : int", query, "f");
 
-        
+
         // without aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : int, c : int, d : long, e : int);"
             + "f = foreach l1 generate ..$0 as (first), e.. as (last), d ..,  .. b ;"
-            ; 
+            ;
         compileAndCompareSchema("first : int, last : int, d : long, e : int, a : int, b : int", query, "f");
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,50,40L,50,10,20)",
@@ -353,7 +351,7 @@ public class TestProjectRange  {
         Util.checkQueryOutputsAfterSort(it, expectedRes);
 
     }
-    
+
     /**
      * -ve test cases
      * @throws IOException
@@ -365,19 +363,19 @@ public class TestProjectRange  {
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  $3 .. $1;"
-            ; 
+            ;
         Util.checkExceptionMessage(query, "f",
                 "start column appears after end column in range projection");
 
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' as (a : int, b : float, c : int, d : int, e : int);"
             + "f = foreach l1 generate  c .. b;"
-            ; 
+            ;
         Util.checkExceptionMessage(query, "f",
                 "start column appears after end column in range projection");
     }
 
-    
+
     /**
      * -ve test cases
      * @throws IOException
@@ -389,18 +387,18 @@ public class TestProjectRange  {
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + "f = foreach l1 generate  $3 .. $1;"
-            ; 
+            ;
         Util.checkExceptionMessage(query, "f",
                 "start column appears after end column in range projection");
 
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "' ;"
             + "f = foreach l1 generate  a .. b;"
-            ; 
+            ;
         Util.checkExceptionMessage(query, "f",
                 "Invalid field projection. Projected field [a] does not exist.");
     }
-    
+
     /**
      * Test foreach without schema
      * @throws IOException
@@ -410,21 +408,21 @@ public class TestProjectRange  {
     public void testStartRangeForeachNOSchema() throws IOException, ParserException {
 
         String query;
-        
+
         // without aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + "f = foreach l1 generate ..$3  as (a,b,c,d);"
-            ; 
+            ;
         compileAndCompareSchema("a : bytearray,b : bytearray,c : bytearray,d : bytearray", query, "f");
-        
-        
+
+
         Util.registerMultiLineQuery(pigServer, query);
 
         pigServer.explain("f", System.err);
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStringAsByteArray(
                     new String[] {
                             "('10','20','30','40')",
@@ -444,21 +442,21 @@ public class TestProjectRange  {
     public void testMixRangeForeachNOSchema() throws IOException, ParserException {
 
         String query;
-        
+
         // without aliases
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + "f = foreach l1 generate ..$0 as (first), $4.. as (last), $3 ..,  .. $1 ;"
-            ; 
+            ;
         compileAndCompareSchema((Schema)null, query, "f");
-        
-        
+
+
         Util.registerMultiLineQuery(pigServer, query);
 
         pigServer.explain("f", System.err);
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStringAsByteArray(
                     new String[] {
                             "('10','50','40','50','10','20')",
@@ -476,19 +474,19 @@ public class TestProjectRange  {
     @Test
     public void testRangeForeachWFilterNOSchema() throws IOException, ParserException {
         String query;
-        
+
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + "f = foreach l1 generate ..$0 as (first), $4.. as (last), $3 ..,  .. $1 ;"
             + " fil = filter f by $0 > 10;"
-            ; 
-        
+            ;
+
         Util.registerMultiLineQuery(pigServer, query);
 
         pigServer.explain("fil", System.err);
         Iterator<Tuple> it = pigServer.openIterator("fil");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStringAsByteArray(
                     new String[] {
                             "('11','51','41','51','11','21')",
@@ -496,91 +494,91 @@ public class TestProjectRange  {
         Util.checkQueryOutputsAfterSort(it, expectedRes);
 
     }
-    
+
     @Test
     public void testRangeOrderByWSchema() throws IOException, ParserException{
         String query;
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " o = order l1 by  .. $2 DESC ;"
-                ; 
+                ;
             compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "o");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {false,false,false};
             checkNumExpressionPlansForSort(lp, 3, isAsc);
         }
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " o = order l1 by  $3 ..  ;"
-                ; 
+                ;
             compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "o");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {true, true};
             checkNumExpressionPlansForSort(lp, 2, isAsc);
         }
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " o = order l1 by  d .. DESC  ;"
-                ; 
+                ;
             compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "o");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {false, false};
             checkNumExpressionPlansForSort(lp, 2, isAsc);
         }
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " f = foreach l1 generate *;"
                 + " o = order f by  $0 .. c ASC  ;"
                 + " lim = limit o 10; ;"
-                ; 
+                ;
             compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "lim");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {true, true, true};
             checkNumExpressionPlansForSort(lp, 3, isAsc);
         }
 
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
                     "' as (a : int, b : long, c : int, d : int, e : int);"
             + " o = order l1 by $0 .. $4  ;"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "o");
 
         //check number of sort expression plans
-       
+
         LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {true,true,true,true,true};
         checkNumExpressionPlansForSort(lp, 5, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         Iterator<Tuple> it = pigServer.openIterator("o");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30,40,50)",
@@ -588,8 +586,8 @@ public class TestProjectRange  {
                     });
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
-    
+
+
     /**
      * Test nested order-by with schema
      * @throws IOException
@@ -598,55 +596,55 @@ public class TestProjectRange  {
     @Test
     public void testRangeOrderByNestedWSchema() throws IOException, ParserException{
         String query;
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " g = group l1 by a;"
                 + " f = foreach g { o = order l1 by  .. $2 DESC; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : int,o: {t : (a: int,b: long,c: int,d: int,e: int)}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {false,false,false};
             checkNumExpressionPlansForSort(lp, 3, isAsc);
         }
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " g = group l1 by a;"
                 + " f = foreach g { o = order l1 by  d .. ; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : int,o: {t : (a: int,b: long,c: int,d: int,e: int)}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {true,true};
             checkNumExpressionPlansForSort(lp, 2, isAsc);
         }
         {
-            
+
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                         "' as (a : int, b : long, c : int, d : int, e : int);"
                 + " g = group l1 by a;"
                 + " f = foreach g { o = order l1 by  $2 .. $3 ASC, $1..c DESC; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : int,o: {t : (a: int,b: long,c: int,d: int,e: int)}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {true,true,false,false};
             checkNumExpressionPlansForSort(lp, 4, isAsc);
@@ -654,26 +652,26 @@ public class TestProjectRange  {
         }
 
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
                     "' as (a : int, b : long, c : int, d : int, e : int);"
             + " g = group l1 by a;"
             + " f = foreach g { o = order l1 by  $2 .. $3 DESC, $1 ASC; generate group, o;}"
-            ; 
-        
+            ;
+
         String expectedSchStr = "g : int,o: {t : (a: int,b: long,c: int,d: int,e: int)}";
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
         compileAndCompareSchema(expectedSch, query, "f");
-        
+
         //check number of sort expression plans
                LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {false,false,true};
         checkNumExpressionPlansForSort(lp, 3, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,{(10,20,30,40,50)})",
@@ -681,7 +679,7 @@ public class TestProjectRange  {
                     });
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     /**
      * Test nested order-by without schema
      * @throws IOException
@@ -690,19 +688,19 @@ public class TestProjectRange  {
     @Test
     public void testRangeOrderByNestedNOSchema() throws IOException, ParserException{
         String query;
-        
+
         {
             query =
                 "  l1 = load '" + INP_FILE_5FIELDS + "';"
                 + " g = group l1 by $0;"
                 + " f = foreach g { o = order l1 by  .. $2 DESC; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : bytearray, o: {t : ()}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {false,false,false};
             checkNumExpressionPlansForSort(lp, 3, isAsc);
@@ -712,31 +710,31 @@ public class TestProjectRange  {
                 "  l1 = load '" + INP_FILE_5FIELDS + "';"
                 + " g = group l1 by $0;"
                 + " f = foreach g { o = order l1 by  $3 .. ; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : bytearray, o: {t : ()}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             //project to end can't be expanded
             boolean[] isAsc = {true};
             checkNumExpressionPlansForSort(lp, 1, isAsc);
         }
         {
-            
+
             query =
                 "  l1 = load '" + INP_FILE_5FIELDS + "';"
                 + " g = group l1 by $1;"
                 + " f = foreach g { o = order l1 by  $2 .. $3 ASC, $1 .. $2 DESC; generate group, o;}"
-                ; 
+                ;
             String expectedSchStr = "g : bytearray, o: {t : ()}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "f");
 
             //check number of sort expression plans
-           
+
             LogicalPlan lp = createAndProcessLPlan(query);
             boolean[] isAsc = {true,true,false,false};
             checkNumExpressionPlansForSort(lp, 4, isAsc);
@@ -747,38 +745,38 @@ public class TestProjectRange  {
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + " g = group l1 by 1;"
             + " f = foreach g { o = order l1 by  $2 .. $3 desc; generate group, o;}"
-            ; 
+            ;
         String expectedSchStr = "g : int, o: {t : ()}";
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
         compileAndCompareSchema(expectedSch, query, "f");
         //check number of sort expression plans
-       
+
         LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {false,false};
         checkNumExpressionPlansForSort(lp, 2, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         Iterator<Tuple> it = pigServer.openIterator("f");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(1,{(11,21,31,41,51),(10,20,30,40,50)})",
                     });
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     private LOSort checkNumExpressionPlansForSort(LogicalPlan lp, int numPlans, boolean[] isAsc) {
         Class<?> sortClass = org.apache.pig.newplan.logical.relational.LOSort.class;
         LOSort sort = (LOSort) NewLogicalPlanUtil.getRelOpFromPlan(lp, sortClass);
         assertEquals("number of sort col plans", numPlans, sort.getSortColPlans().size());
-        
+
         List<Boolean> ascCols = sort.getAscendingCols();
         for(int i = 0; i < ascCols.size(); i++){
             assertEquals("ascending order", isAsc[i], ascCols.get(i));
         }
-        
+
         return sort;
     }
 
@@ -791,10 +789,10 @@ public class TestProjectRange  {
         CompilationMessageCollector collector = new CompilationMessageCollector() ;
         new TypeCheckingRelVisitor( lp, collector).visit();
         new UnionOnSchemaSetter( lp ).visit();
-        new CastLineageSetter(lp, collector).visit(); 
+        new CastLineageSetter(lp, collector).visit();
 
         return lp;
-       
+
     }
 
     private LogicalPlan generateLogicalPlan(String query) {
@@ -812,23 +810,23 @@ public class TestProjectRange  {
         String query;
 
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
                     "' as (a : int, b : long, c : int, d : int, e : int);"
             + " o = order l1 by  b .. c, d .. DESC,  a DESC;"
-            ; 
+            ;
         compileAndCompareSchema("a : int, b : long, c : int, d : int, e : int", query, "o");
 
         //check number of sort expression plans
-       
+
         LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {true,true,false,false,false};
         checkNumExpressionPlansForSort(lp, 5, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         Iterator<Tuple> it = pigServer.openIterator("o");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30,40,50)",
@@ -845,21 +843,21 @@ public class TestProjectRange  {
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + " o = order l1 by  $1 .. $2 DESC,  $0 , $4 .. DESC;"
-            ; 
+            ;
         compileAndCompareSchema((Schema)null, query, "o");
 
         //check number of sort expression plans
-       
+
         LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {false, false,true,false};
         checkNumExpressionPlansForSort(lp, 4, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         pigServer.explain("o", System.err);
         Iterator<Tuple> it = pigServer.openIterator("o");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(11,21,31,41,51)",
@@ -867,7 +865,7 @@ public class TestProjectRange  {
                     });
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     @Test
     public void testRangeOrderByStartNOSchema() throws IOException, ParserException{
         String query;
@@ -875,21 +873,21 @@ public class TestProjectRange  {
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + " o = order l1 by $3 .. DESC;"
-            ; 
+            ;
         compileAndCompareSchema((Schema)null, query, "o");
 
         //check number of sort expression plans
-       
+
         LogicalPlan lp = createAndProcessLPlan(query);
         boolean[] isAsc = {false};
         checkNumExpressionPlansForSort(lp, 1, isAsc);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
         pigServer.explain("o", System.err);
         Iterator<Tuple> it = pigServer.openIterator("o");
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(11,21,31,41,51)",
@@ -897,7 +895,7 @@ public class TestProjectRange  {
                     });
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     @Test
     public void testRangeOrderByStartNegNOSchema() throws IOException, ParserException{
         String query;
@@ -905,75 +903,75 @@ public class TestProjectRange  {
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';"
             + " o = order l1 by $3 .. DESC, $1;"
-            ; 
+            ;
         Util.checkExceptionMessage(query, "o","Project-range to end (eg. x..)" +
                 " is supported in order-by only as last sort column");
     }
-    
+
     @Test
     public void testRangeGroupWSchema() throws IOException, ParserException{
         String query;
 
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                 "' as (a : int, b : long, c : int, d : int, e : int);" +
-                "  l2 = load '" + INP_FILE_5FIELDS + 
+                "  l2 = load '" + INP_FILE_5FIELDS +
                 "' as (a : int, b : long, c : int, d : int, e : int);" +
                 "  g = group l1 by   d ..,  l2 by d ..;"
-                ; 
+                ;
             String expectedSchStr = "grp: (d: int,e : int)," +
                             "l1: {t : (a: int,b: long,c: int,d: int,e: int)}," +
                             "l2: {t : (a: int,b: long,c: int,d: int,e: int)}";
-            
+
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "g");
             //check number of group expression plans
             LogicalPlan lp = createAndProcessLPlan(query);
             checkNumExpressionPlansForGroup(lp, 2);
         }
-        
+
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                 "' as (a : int, b : long, c : int, d : int, e : int);" +
-                "  l2 = load '" + INP_FILE_5FIELDS + 
+                "  l2 = load '" + INP_FILE_5FIELDS +
                 "' as (a : int, b : long, c : int, d : int, e : int);" +
                 "  g = group l1 by   c .. $3,  l2 by $3..$4;"
-                ; 
+                ;
             String expectedSchStr = "grp: (c: int,d : int)," +
                             "l1: {t : (a: int,b: long,c: int,d: int,e: int)}," +
                             "l2: {t : (a: int,b: long,c: int,d: int,e: int)}";
-            
+
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             compileAndCompareSchema(expectedSch, query, "g");
             //check number of group expression plans
             LogicalPlan lp = createAndProcessLPlan(query);
             checkNumExpressionPlansForGroup(lp, 2);
         }
-        
+
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
-            "  l2 = load '" + INP_FILE_5FIELDS + 
+            "  l2 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
             "  g = group l1 by   .. c,  l2 by .. c;"
-            ; 
+            ;
         String expectedSchStr = "grp: (a: int,b: long,c: int)," +
         		"l1: {t : (a: int,b: long,c: int,d: int,e: int)}," +
         		"l2: {t : (a: int,b: long,c: int,d: int,e: int)}";
-        
+
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
-        compileAndCompareSchema(expectedSch, query, "g");            
+        compileAndCompareSchema(expectedSch, query, "g");
 
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForGroup(lp, 3);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "((10,20,30),{(10,20,30,40,50)},{(10,20,30,40,50)})",
@@ -1005,42 +1003,42 @@ public class TestProjectRange  {
     private LOCogroup checkNumExpressionPlansForGroup(LogicalPlan lp, int numPlans) {
         Class<?> groupClass = org.apache.pig.newplan.logical.relational.LOCogroup.class;
         LOCogroup grp = (LOCogroup) NewLogicalPlanUtil.getRelOpFromPlan(lp, groupClass);
-        
+
         for( int inp : grp.getExpressionPlans().keySet()){
             List<LogicalExpressionPlan> plans = grp.getExpressionPlans().get(inp);
             assertEquals("number of group-by plans", numPlans, plans.size());
         }
-        
+
         return grp;
     }
-    
+
     @Test
     public void testRangeCoGroupMixWSchema() throws IOException, ParserException{
         String query;
 
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
-            "  l2 = load '" + INP_FILE_5FIELDS + 
+            "  l2 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
             "  g = group l1 by  (a + b, c .. d, e.. ),  l2 by ($0 + $1, c..d, $4..);"
-            ; 
+            ;
         String expectedSchStr = "grp: (x : long, c :int , d :int, e : int)," +
                         "l1: {t : (a: int,b: long,c: int,d: int,e: int)}," +
                         "l2: {t : (a: int,b: long,c: int,d: int,e: int)}";
-        
+
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
         expectedSch.getField(0).schema.getField(0).alias = null;
-        compileAndCompareSchema(expectedSch, query, "g");            
+        compileAndCompareSchema(expectedSch, query, "g");
 
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForGroup(lp, 4);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "((30,30,40,50),{(10,20,30,40,50)},{(10,20,30,40,50)})",
@@ -1049,49 +1047,49 @@ public class TestProjectRange  {
         Iterator<Tuple> it = pigServer.openIterator("g");
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     @Test
     public void testRangeGroupMixWSchema() throws IOException, ParserException{
         String query;
 
         {
             query =
-                "  l1 = load '" + INP_FILE_5FIELDS + 
+                "  l1 = load '" + INP_FILE_5FIELDS +
                 "' as (a : int, b : long, c : int, d : int, e : int);" +
                 "  g = group l1 by  b .. c;"
-                ; 
+                ;
             String expectedSchStr = "grp: (b : long, c :int)," +
                             "l1: {t : (a: int,b: long,c: int,d: int,e: int)}";
-            
+
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
-            compileAndCompareSchema(expectedSch, query, "g");            
+            compileAndCompareSchema(expectedSch, query, "g");
 
 
             //check number of group expression plans
             LogicalPlan lp = createAndProcessLPlan(query);
             checkNumExpressionPlansForGroup(lp, 2);
         }
-        
+
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
             "  g = group l1 by  $2 .. ;" +
             "  lim = limit g 2;"
-            ; 
+            ;
         String expectedSchStr = "grp: (c :int , d :int, e : int)," +
                         "l1: {t : (a: int,b: long,c: int,d: int,e: int)}";
-        
+
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
-        compileAndCompareSchema(expectedSch, query, "lim");            
+        compileAndCompareSchema(expectedSch, query, "lim");
 
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForGroup(lp, 3);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "((30,40,50),{(10,20,30,40,50)})",
@@ -1100,8 +1098,8 @@ public class TestProjectRange  {
         Iterator<Tuple> it = pigServer.openIterator("lim");
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
-    
+
+
     @Test
     public void testRangeGroupMixNOSchema() throws IOException, ParserException{
         String query;
@@ -1110,50 +1108,50 @@ public class TestProjectRange  {
             query =
                 "  l1 = load '" + INP_FILE_5FIELDS + "';" +
                 "  g = group l1 by  .. $2;"
-                ; 
+                ;
             String expectedSchStr = "g : (duma, dumb, dumc), l1: {t : ()}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
             setAliasesToNull(expectedSch.getField(0).schema);
-            compileAndCompareSchema(expectedSch, query, "g");            
+            compileAndCompareSchema(expectedSch, query, "g");
 
 
             //check number of group expression plans
             LogicalPlan lp = createAndProcessLPlan(query);
             checkNumExpressionPlansForGroup(lp, 3);
         }
-        
+
         {
             query =
                 "  l1 = load '" + INP_FILE_5FIELDS + "';" +
                 "  g = group l1 by  $3 .. $3;"
-                ; 
+                ;
             String expectedSchStr = "g : bytearray, l1: {t : ()}";
             Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
-            compileAndCompareSchema(expectedSch, query, "g");            
+            compileAndCompareSchema(expectedSch, query, "g");
 
 
             //check number of group expression plans
             LogicalPlan lp = createAndProcessLPlan(query);
             checkNumExpressionPlansForGroup(lp, 1);
         }
-        
+
         query =
             "  l1 = load '" + INP_FILE_5FIELDS + "';" +
-            "  g = group l1 by  $2 .. ;" 
-            ; 
+            "  g = group l1 by  $2 .. ;"
+            ;
         String expectedSchStr = "grp: (), l1: {t : ()}";
-        
+
         Schema expectedSch = getCleanedGroupSchema(expectedSchStr);
-        compileAndCompareSchema(expectedSch, query, "g");            
+        compileAndCompareSchema(expectedSch, query, "g");
 
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForGroup(lp, 1);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "((30,40,50),{(10,20,30,40,50)})",
@@ -1162,7 +1160,7 @@ public class TestProjectRange  {
         Iterator<Tuple> it = pigServer.openIterator("g");
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     private void setAliasesToNull(Schema schema) {
        for(FieldSchema fs : schema.getFields()){
            fs.alias = null;
@@ -1174,25 +1172,25 @@ public class TestProjectRange  {
         String query;
 
         query =
-            "  l1 = load '" + INP_FILE_5FIELDS + 
+            "  l1 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
-            "  l2 = load '" + INP_FILE_5FIELDS + 
+            "  l2 = load '" + INP_FILE_5FIELDS +
             "' as (a : int, b : long, c : int, d : int, e : int);" +
             "  j = join l1 by  (a + b, c .. d, e.. ),  l2 by ($0 + $1, c..d, $4..);"
-            ; 
+            ;
         String expectedSchStr = "l1::a: int, l1::b: long, l1::c: int, l1::d: int, l1::e: int," +
                         "l2::a: int, l2::b: long, l2::c: int, l2::d: int, l2::e: int";
 
-        compileAndCompareSchema(expectedSchStr, query, "j");            
+        compileAndCompareSchema(expectedSchStr, query, "j");
 
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForJoin(lp, 4);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30,40,50,10,20,30,40,50)",
@@ -1201,7 +1199,7 @@ public class TestProjectRange  {
         Iterator<Tuple> it = pigServer.openIterator("j");
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     @Test
     public void testRangeJoinMixNOSchema() throws IOException, ParserException{
         String query;
@@ -1210,17 +1208,17 @@ public class TestProjectRange  {
             "  l1 = load '" + INP_FILE_5FIELDS + "';" +
             "  l2 = load '" + INP_FILE_5FIELDS + "';" +
             "  j = join l1 by  $0 .. $3,  l2 by $0 .. $3;"
-            ; 
+            ;
 
-        compileAndCompareSchema((Schema)null, query, "j");            
+        compileAndCompareSchema((Schema)null, query, "j");
 
         //check number of group expression plans
         LogicalPlan lp = createAndProcessLPlan(query);
         checkNumExpressionPlansForJoin(lp, 4);
-        
+
         Util.registerMultiLineQuery(pigServer, query);
 
-        List<Tuple> expectedRes = 
+        List<Tuple> expectedRes =
             Util.getTuplesFromConstantTupleStrings(
                     new String[] {
                             "(10,20,30,40,50,10,20,30,40,50)",
@@ -1229,7 +1227,7 @@ public class TestProjectRange  {
         Iterator<Tuple> it = pigServer.openIterator("j");
         Util.checkQueryOutputs(it, expectedRes);
     }
-    
+
     @Test
     public void testRangeCoGroupNegNoSchema() throws IOException, ParserException{
         String query;
@@ -1242,17 +1240,17 @@ public class TestProjectRange  {
         		"(range of columns to the end) " +
                         "is only allowed if the input has a schema");
     }
-    
+
     private LOJoin checkNumExpressionPlansForJoin(LogicalPlan lp, int numPlans) {
         Class<?> joinClass = org.apache.pig.newplan.logical.relational.LOJoin.class;
         LOJoin join = (LOJoin) NewLogicalPlanUtil.getRelOpFromPlan(lp, joinClass);
-        
+
         for( int inp : join.getExpressionPlans().keySet()){
             List<LogicalExpressionPlan> plans = join.getExpressionPlans().get(inp);
             assertEquals("number of join exp plans", numPlans, plans.size());
         }
-        
+
         return join;
-    }    
-    
+    }
+
 }
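
For readers skimming the diff: the hunks above all touch tests for Pig's column-range ("..") projection in ORDER BY, GROUP/COGROUP, and JOIN keys. A minimal Pig Latin sketch of the syntax these tests exercise follows; the relation names and the 'data' path are illustrative placeholders, not taken from the test suite.

    -- load with an explicit schema so name-based ranges can be expanded
    l1 = LOAD 'data' AS (a:int, b:long, c:int, d:int, e:int);
    l2 = LOAD 'data' AS (a:int, b:long, c:int, d:int, e:int);

    -- order by the columns b through c, then by a descending
    o = ORDER l1 BY b .. c, a DESC;

    -- group by an open-ended range starting at the third positional column
    g = GROUP l1 BY $2 ..;

    -- join two relations on the same leading range of columns
    j = JOIN l1 BY $0 .. $3, l2 BY $0 .. $3;

The negative tests in the same file spell out the corresponding restrictions, e.g. that a range to the end ("x ..") is accepted in ORDER BY only as the last sort column, and that the cogroup case requires the input to have a schema.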


