pig-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From o...@apache.org
Subject svn commit: r747660 [2/2] - in /hadoop/pig/branches/multiquery: ./ src/org/apache/pig/ src/org/apache/pig/backend/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/ src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/ src/...
Date Wed, 25 Feb 2009 05:18:48 GMT
Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/OperatorPlan.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/OperatorPlan.java?rev=747660&r1=747659&r2=747660&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/OperatorPlan.java (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/OperatorPlan.java Wed Feb 25 05:18:47 2009
@@ -48,7 +48,7 @@
  * for graph manipulators (such as the validators and optimizers) to
  * understand the internals of various nodes.
  */
-public abstract class OperatorPlan<E extends Operator> implements Iterable, Serializable, Cloneable {
+public abstract class OperatorPlan<E extends Operator> implements Iterable<E>, Serializable, Cloneable {
     protected Map<E, OperatorKey> mOps;
     protected Map<OperatorKey, E> mKeys;
     protected MultiMap<E, E> mFromEdges;
@@ -318,66 +318,95 @@
      * @throws PlanException
      */
     public OperatorPlan<E> merge(OperatorPlan<E> inpPlan) throws PlanException {
+       return doMerge(inpPlan, false);
+    }
+
+    /**
+     * Merges the operators in the incoming plan with this plan's operators.
+     * The plans can have shared components. 
+     *
+     * @param inpPlan
+     * @return this pointer
+     * @throws PlanException
+     */
+    public OperatorPlan<E> mergeSharedPlan(OperatorPlan<E> inpPlan) throws PlanException {
+        return doMerge(inpPlan, true);
+    }
+
+    private OperatorPlan<E> doMerge(OperatorPlan<E> inpPlan, boolean allowSharedPlan) throws PlanException {
         Map<E, OperatorKey> inpOps = inpPlan.mOps;
         Set<E> curOpsKeySet = mOps.keySet();
         for (Map.Entry<E, OperatorKey> mapEnt : inpOps.entrySet()) {
             if (curOpsKeySet.contains(mapEnt.getKey())) {
-                PlanException pe = new PlanException(
+                if (!allowSharedPlan) {
+                    PlanException pe = new PlanException(
                         "There are operators that are shared across the plans. Merge of "
-                                + "mutually exclusive plans is the only supported merge.");
-                log.error(pe.getMessage());
-                throw pe;
+                            + "mutually exclusive plans is the only supported merge.");
+                    log.error(pe.getMessage());
+                    throw pe;
+                }
+            } else {
+                mOps.put(mapEnt.getKey(), mapEnt.getValue());
             }
-            mOps.put(mapEnt.getKey(), mapEnt.getValue());
         }
 
         Map<OperatorKey, E> inpKeys = inpPlan.mKeys;
         Set<OperatorKey> curOKKeySet = mKeys.keySet();
         for (Map.Entry<OperatorKey, E> mapEnt : inpKeys.entrySet()) {
             if (curOKKeySet.contains(mapEnt.getKey())) {
-                PlanException pe = new PlanException(
+                if (!allowSharedPlan) {
+                    PlanException pe = new PlanException(
                         "There are operators that are shared across the plans. Merge of "
-                                + "mutually exclusive plans is the only supported merge.");
-                log.error(pe.getMessage());
-                throw pe;
+                            + "mutually exclusive plans is the only supported merge.");
+                    log.error(pe.getMessage());
+                    throw pe;
+                }
+            } else {
+                mKeys.put(mapEnt.getKey(), mapEnt.getValue());
             }
-            mKeys.put(mapEnt.getKey(), mapEnt.getValue());
         }
 
         MultiMap<E, E> inpFromEdges = inpPlan.mFromEdges;
         Set<E> curFEKeySet = mFromEdges.keySet();
         for (E fromEdg : inpFromEdges.keySet()) {
-            if (curFEKeySet.contains(fromEdg)) {
-                PlanException pe = new PlanException(
+            if (curFEKeySet.contains(fromEdg) && !allowSharedPlan) {
+            	PlanException pe = new PlanException(
                         "There are operators that are shared across the plans. Merge of "
-                                + "mutually exclusive plans is the only supported merge.");
+                            + "mutually exclusive plans is the only supported merge.");
                 log.error(pe.getMessage());
                 throw pe;
             }
+            
             for (E e : inpFromEdges.get(fromEdg)) {
-                mFromEdges.put(fromEdg, e);
+                if (mFromEdges.get(fromEdg) == null || !mFromEdges.get(fromEdg).contains(e)) {
+                    mFromEdges.put(fromEdg, e);
+                }
             }
         }
 
         MultiMap<E, E> inpToEdges = inpPlan.mToEdges;
         Set<E> curTEKeySet = mToEdges.keySet();
         for (E toEdg : inpToEdges.keySet()) {
-            if (curTEKeySet.contains(toEdg)) {
+            if (curTEKeySet.contains(toEdg) && !allowSharedPlan) {  
                 PlanException pe = new PlanException(
-                        "There are operators that are shared across the plans. Merge of "
-                                + "mutually exclusive plans is the only supported merge.");
+                    "There are operators that are shared across the plans. Merge of "
+                        + "mutually exclusive plans is the only supported merge.");
                 log.error(pe.getMessage());
-                throw pe;
+                throw pe;                
             }
+            
             for (E e : inpToEdges.get(toEdg)) {
-                mToEdges.put(toEdg, e);
+                if (mToEdges.get(toEdg) == null || !mToEdges.get(toEdg).contains(e)) {
+                    mToEdges.put(toEdg, e);
+                }
             }
         }
 
         markDirty();
         return this;
     }
-    
+
+
     /**
      * Utility method heavily used in the MRCompiler
      * Adds the leaf operator to the plan and connects

Added: hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/PlanDumper.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/PlanDumper.java?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/PlanDumper.java (added)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/impl/plan/PlanDumper.java Wed Feb 25 05:18:47 2009
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.impl.plan;
+
+import java.io.PrintStream;
+import java.util.LinkedList;
+import java.util.Collection;
+import org.apache.pig.impl.util.MultiMap;
+
+/**
+ * This class dumps a nested plan to a print stream. It does not walk
+ * the graph in any particular fashion it merely iterates over all
+ * operators and edges and calls a corresponding dump function. If a
+ * node of the plan has nested plans this will be dumped when the
+ * node is handled.
+ */
+public class PlanDumper<E extends Operator, 
+                        P extends OperatorPlan<E>, 
+                        S extends OperatorPlan> {
+    
+    protected PrintStream ps;
+    protected P plan;
+    protected boolean isVerbose = true;
+  
+    public PlanDumper(P plan, PrintStream ps) {
+        this.plan = plan;
+        this.ps = ps;
+    }
+
+    public void setVerbose(boolean verbose) {
+        this.isVerbose = verbose;
+    }
+
+    public boolean isVerbose() {
+        return isVerbose;
+    }
+
+    /**
+     * This is the public interface. Dump writes the plan and nested
+     * plans to the stream.
+     */
+    public void dump() {
+        for (E op: plan) {
+            MultiMap<E,S> map = getMultiInputNestedPlans(op);
+            if (isVerbose && !map.isEmpty()) {
+                dumpMultiInputNestedOperator(op, map);
+                continue;
+            }
+            
+            Collection<S> plans = getNestedPlans(op);
+            if (isVerbose && plans.size() > 0) {
+                dumpNestedOperator(op, plans);
+                continue;
+            }
+
+            dumpOperator(op);
+        }
+
+        for(E op: plan) {
+            Collection<E> successors = plan.getSuccessors(op);
+            if (successors != null) {
+                for (E suc: successors) {
+                    dumpEdge(op, suc);
+                }
+            }
+        }
+    }
+
+    /**
+     * makeDumper is a factory method. Used by subclasses to specify
+     * what dumper should handle the nested plan.
+     * @param plan the nested plan that the new dumper should handle
+     * @return the dumper for the given plan
+     */
+    protected PlanDumper makeDumper(S plan, PrintStream ps) {
+        return new PlanDumper(plan, ps);
+    }
+
+    /**
+     * Will be called to dump a simple operator
+     * @param op the operator to be dumped
+     */
+    protected void dumpOperator(E op) {
+        ps.println(op.name().replace(" ","_"));
+    }
+
+    /**
+     * Will be called when an operator has nested plans, which are
+     * connected to one of the multiple inputs.
+     * @param op the nested operator
+     * @param plans a map of input operator to connected nested plan
+     */
+    protected void dumpMultiInputNestedOperator(E op, MultiMap<E,S> plans) {
+        dumpOperator(op);
+        for (E aop: plans.keySet()) {
+            for (S plan: plans.get(aop)) {
+                PlanDumper dumper = new PlanDumper(plan, ps);
+                dumper.dump();
+            }
+        }
+    }
+
+    /**
+     * Will be called for nested operators. The operators are not
+     * specifically connected to any input or output operators of E
+     * @param op the nested operator
+     * @param plans a collection of sub plans.
+     */
+    protected void dumpNestedOperator(E op, Collection<S> plans) {
+        dumpOperator(op);
+        for (S plan: plans) {
+            PlanDumper  dumper = new PlanDumper(plan, ps);
+            dumper.dump();
+        }
+    }
+
+    /**
+     * Will be called to dump the edges of the plan. Each edge results
+     * in one call.
+     * @param op tail of the edge
+     * @param suc head of the edge
+     */
+    protected void dumpEdge(E op, E suc) {
+        ps.println(op.name()+" -> "+suc.name());
+    }
+
+    /**
+     * Used to determine if an operator has nested plans, which are
+     * connected to specific input operators.
+     * @param op the operator to inspect
+     * @return map describing the input to nested plan relationship.
+     */
+    protected MultiMap<E, S> getMultiInputNestedPlans(E op) {
+        return new MultiMap<E, S>();
+    }
+
+    /**
+     * Used to determine if an operator has nested plans (without
+     * connections to input or output operators).
+     * @param op the operator to inspect
+     * @return collection of nested plans.
+     */
+    protected Collection<S> getNestedPlans(E op) {
+        return new LinkedList<S>();
+    }
+
+    /**
+     * Helper function to print a string array.
+     * @param sep Separator
+     * @param strings Array to print
+     */
+    protected void join(String sep, String[] strings) {
+        if (strings == null) {
+            return;
+        }
+        
+        for (int i = 0; i < strings.length; ++i) {
+            if (i != 0) {
+                ps.print(sep);
+            }
+            ps.print(strings[i]);
+        }
+    }
+}

Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/tools/grunt/GruntParser.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/tools/grunt/GruntParser.java?rev=747660&r1=747659&r2=747660&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/tools/grunt/GruntParser.java (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/tools/grunt/GruntParser.java Wed Feb 25 05:18:47 2009
@@ -21,13 +21,28 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.Reader;
+import java.io.FileReader;
+import java.io.FileInputStream;
+import java.io.OutputStreamWriter;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.FileWriter;
+import java.io.FileOutputStream;
+import java.io.InputStreamReader;
+import java.io.FileNotFoundException;
+import java.io.StringReader;
+import java.io.StringWriter;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;
 import java.util.Properties;
 import java.io.ByteArrayOutputStream;
+import java.io.ByteArrayInputStream;
 import java.io.PrintStream;
 
 import jline.ConsoleReader;
+import jline.ConsoleReaderInputStream;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -47,6 +62,7 @@
 import org.apache.pig.tools.pigscript.parser.ParseException;
 import org.apache.pig.tools.pigscript.parser.PigScriptParser;
 import org.apache.pig.tools.pigscript.parser.PigScriptParserTokenManager;
+import org.apache.pig.tools.parameters.ParameterSubstitutionPreprocessor;
 
 public class GruntParser extends PigScriptParser {
 
@@ -73,42 +89,47 @@
     }
 
     private void init() {
-        // nothing, for now.
+        mDone = false;
     }
     
+    /** 
+     * Parses Pig commands in either interactive mode or batch mode. 
+     * In interactive mode, executes the plan right away whenever a 
+     * STORE command is encountered.
+     *
+     * @throws IOException if an I/O error occurs while executing commands
+     * @throws ParseException if a command cannot be parsed
+     */
     public void parseStopOnError() throws IOException, ParseException
     {
-        prompt();
-        mDone = false;
-        while(!mDone)
-            parse();
-    }
+        if (mPigServer == null) {
+            throw new IllegalStateException();
+        }
 
-    public void parseContOnError()
-    {
-        prompt();
-        mDone = false;
-        while(!mDone)
-            try
-            {
+        if (!mInteractive) {
+            mPigServer.setBatchOn();
+        }
+
+        try {
+            prompt();
+            mDone = false;
+            while(!mDone) {
                 parse();
             }
-            catch(Exception e)
-            {
-                Exception pe = Utils.getPermissionException(e);
-                if (pe != null)
-                    log.error("You don't have permission to perform the operation. Error from the server: " + pe.getMessage());
-                else {
-                    ByteArrayOutputStream bs = new ByteArrayOutputStream();
-                    e.printStackTrace(new PrintStream(bs));
-                    log.error(bs.toString());
-                    //log.error(e.getMessage());
-                    //log.error(e);
-               }
-
-            } catch (Error e) {
-                log.error(e);
+        } catch(IOException e) {
+            if (!mInteractive) {
+                mPigServer.discardBatch();
+            }
+            throw e;
+        } catch (ParseException e) {
+            if (!mInteractive) {
+                mPigServer.discardBatch();
             }
+            throw e;
+        }
+
+        if (!mInteractive) {
+            mPigServer.executeBatch();
+        }
     }
 
     public void setParams(PigServer pigServer)
@@ -136,10 +157,7 @@
 
     public void prompt()
     {
-        if (mInteractive)
-        {
-            /*System.err.print("grunt> ");
-            System.err.flush();*/
+        if (mInteractive) {
             mConsoleReader.setDefaultPrompt("grunt> ");
         }
     }
@@ -148,19 +166,150 @@
     {
         mDone = true;
     }
+
+    public boolean isDone() {
+        return mDone;
+    }
     
     protected void processDescribe(String alias) throws IOException {
         mPigServer.dumpSchema(alias);
     }
 
-    protected void processExplain(String alias) throws IOException {
-        mPigServer.explain(alias, System.out);
+    protected void processExplain(String alias, String script, boolean isVerbose, 
+                                  String format, String target, 
+                                  List<String> params, List<String> files) 
+        throws IOException, ParseException {
+
+        PrintStream out = System.out;
+
+        if (script != null) {
+            mPigServer.setBatchOn();
+            try {
+                loadScript(script, true, params, files);
+            } catch(IOException e) {
+                mPigServer.discardBatch();
+                throw e;
+            } catch (ParseException e) {
+                mPigServer.discardBatch();
+                throw e;
+            }
+        }
+        
+        if (target != null) {
+            File file = new File(target);
+
+            if (file.isDirectory()) {
+                mPigServer.explain(alias, format, isVerbose, target);
+                return;
+            }
+            else {
+                try {
+                    out = new PrintStream(new FileOutputStream(target));
+                }
+                catch (FileNotFoundException fnfe) {
+                    throw new ParseException("File not found: " + target);
+                } catch (SecurityException se) {
+                    throw new ParseException("Cannot access file: " + target);
+                }
+            }
+        }
+        mPigServer.explain(alias, format, isVerbose, out, out, out);
+        if (script != null) {
+            mPigServer.discardBatch();
+        }
     }
     
     protected void processRegister(String jar) throws IOException {
         mPigServer.registerJar(jar);
     }
 
+    private String runPreprocessor(String script, List<String> params, 
+                                   List<String> files) 
+        throws IOException, ParseException {
+
+        ParameterSubstitutionPreprocessor psp = new ParameterSubstitutionPreprocessor(50);
+        StringWriter writer = new StringWriter();
+
+        try{
+            psp.genSubstitutedFile(new BufferedReader(new FileReader(script)), 
+                                   writer,  
+                                   params.size() > 0 ? params.toArray(new String[0]) : null, 
+                                   files.size() > 0 ? files.toArray(new String[0]) : null);
+        } catch (org.apache.pig.tools.parameters.ParseException pex) {
+            throw new ParseException(pex.getMessage());
+        }
+
+        return writer.toString();
+    }
+
+    protected void processScript(String script, boolean batch, 
+                                 List<String> params, List<String> files) 
+        throws IOException, ParseException {
+        
+        if (batch) {
+            mPigServer.setBatchOn();
+            try {
+                loadScript(script, true, params, files);
+            } catch (IOException e) {
+                mPigServer.discardBatch();
+                throw e;
+            } catch (ParseException e) {
+                mPigServer.discardBatch();
+                throw e;
+            }
+            mPigServer.executeBatch();
+        } else {
+            loadScript(script, false, params, files);
+        }
+    }
+
+    private void loadScript(String script, boolean batch, 
+                                 List<String> params, List<String> files) 
+        throws IOException, ParseException {
+        
+        Reader inputReader;
+        ConsoleReader reader;
+        boolean interactive;
+         
+        try {
+            String cmds = runPreprocessor(script, params, files);
+
+            if (mInteractive && !batch) { // Write prompt and echo commands
+                // Console reader treats tabs in a special way
+                cmds = cmds.replaceAll("\t","    ");
+
+                reader = new ConsoleReader(new ByteArrayInputStream(cmds.getBytes()),
+                                           new OutputStreamWriter(System.out));
+                reader.setHistory(mConsoleReader.getHistory());
+                InputStream in = new ConsoleReaderInputStream(reader);
+                inputReader = new BufferedReader(new InputStreamReader(in));
+                interactive = true;
+            } else { // Quietly parse the statements
+                inputReader = new StringReader(cmds);
+                reader = null;
+                interactive = false;
+            }
+        } catch (FileNotFoundException fnfe) {
+            throw new ParseException("File not found: " + script);
+        } catch (SecurityException se) {
+            throw new ParseException("Cannot access file: " + script);
+        }
+
+        GruntParser parser = new GruntParser(inputReader);
+        parser.setParams(mPigServer);
+        parser.setConsoleReader(reader);
+        parser.setInteractive(interactive);
+        
+        parser.prompt();
+        while(!parser.isDone()) {
+            parser.parse();
+        }
+
+        if (interactive) {
+            System.out.println("");
+        }
+    }
+
     protected void processSet(String key, String value) throws IOException, ParseException {
         if (key.equals("debug"))
         {
@@ -173,7 +322,6 @@
         }
         else if (key.equals("job.name"))
         {
-            //mPigServer.setJobName(unquote(value));
             mPigServer.setJobName(value);
         }
         else if (key.equals("stream.skippath")) {
@@ -268,10 +416,10 @@
 
     protected void processDump(String alias) throws IOException
     {
-        Iterator result = mPigServer.openIterator(alias);
+        Iterator<Tuple> result = mPigServer.openIterator(alias);
         while (result.hasNext())
         {
-            Tuple t = (Tuple) result.next();
+            Tuple t = result.next();
             System.out.println(t);
         }
     }
@@ -433,12 +581,16 @@
     protected void processPig(String cmd) throws IOException
     {
         int start = 1;
-        if (!mInteractive)
+        if (!mInteractive) {
             start = getLineNumber();
-        if (cmd.charAt(cmd.length() - 1) != ';')
-            mPigServer.registerQuery(cmd + ";", start); 
-        else 
+        }
+        
+        if (cmd.charAt(cmd.length() - 1) != ';') {
+            mPigServer.registerQuery(cmd + ";", start);
+        }
+        else { 
             mPigServer.registerQuery(cmd, start);
+        }
     }
 
     protected void processRemove(String path, String options ) throws IOException

Modified: hadoop/pig/branches/multiquery/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj?rev=747660&r1=747659&r2=747660&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj (original)
+++ hadoop/pig/branches/multiquery/src/org/apache/pig/tools/pigscript/parser/PigScriptParser.jj Wed Feb 25 05:18:47 2009
@@ -31,6 +31,8 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.Stack;
+import java.util.List;
+import java.util.ArrayList;
 
 import jline.ConsoleReader;
 
@@ -62,7 +64,7 @@
 	
 	abstract protected void processDescribe(String alias) throws IOException;
 
-    abstract protected void processExplain(String alias) throws IOException;
+	abstract protected void processExplain(String alias, String script, boolean isVerbose, String format, String target, List<String> params, List<String> files) throws IOException, ParseException;
 	
 	abstract protected void processRegister(String jar) throws IOException;
 
@@ -98,7 +100,9 @@
 	
 	abstract protected void processIllustrate(String alias) throws IOException;
 
-	 static String unquote(String s)
+	abstract protected void processScript(String script, boolean batch, List<String> params, List<String> files) throws IOException, ParseException;
+
+	static String unquote(String s)
 	{
 		if (s.charAt(0) == '\'' && s.charAt(s.length()-1) == '\'')
 			return s.substring(1, s.length()-1);
@@ -142,6 +146,14 @@
 TOKEN: {<REMOVEFORCE: "rmf">}
 TOKEN: {<SET: "set">}
 TOKEN: {<ILLUSTRATE: "illustrate">}
+TOKEN: {<RUN: "run">}
+TOKEN: {<EXEC: "exec">}
+TOKEN: {<PARAM: "-param">}
+TOKEN: {<PARAM_FILE: "-param_file">}
+TOKEN: {<SCRIPT: "-script">}
+TOKEN: {<DOT: "-dot">}
+TOKEN: {<OUT: "-out">}
+TOKEN: {<BRIEF: "-brief">}
 
 // internal use commands
 TOKEN: {<SCRIPT_DONE: "scriptDone">}
@@ -320,8 +332,8 @@
 |	<#DIGIT : ["0"-"9"] >
 |	<#SPECIALCHAR : ["_"] >
 |	<#FSSPECIALCHAR: ["/"]>
-|       <#FLOAT: <INTEGER> ( "." <INTEGER> )? | "." <INTEGER> >
-|       <#INTEGER: ( <DIGIT> )+ >
+|	<#FLOAT: <INTEGER> ( "." <INTEGER> )? | "." <INTEGER> >
+|	<#INTEGER: ( <DIGIT> )+ >
 |	<#NUMBER: <INTEGER> | <FLOAT> | <FLOAT> ( ["e","E"] ([ "-","+"])? <FLOAT> )?>
 }
 
@@ -392,9 +404,7 @@
 	t1 = <IDENTIFIER>
 	{processDescribe(t1.image);}
 	|
-    <EXPLAIN>
-	t1 = <IDENTIFIER>
-	{processExplain(t1.image);}
+	Explain()
 	|
 	<HELP>
 	{printHelp();}
@@ -433,6 +443,8 @@
 	t1 = GetPath()
 	{processRegister(t1.image);}
 	|
+	Script()
+	|
 	<REMOVE>
 	(
 		t1 = GetPath()
@@ -467,6 +479,88 @@
 	)
 }
 
+void Explain() throws IOException:
+{
+	Token t;
+	String alias = null;
+        String script = null;
+	String format="text";
+	String target=null;
+	boolean isVerbose = true;
+	ArrayList<String> params;
+	ArrayList<String> files;
+
+}
+{
+	<EXPLAIN>
+	{
+		params = new ArrayList<String>(); 
+		files = new ArrayList<String>();
+	}
+	(
+		<BRIEF>
+		{isVerbose = false;}
+         	|
+         	<DOT>
+        	{format = "dot";}
+         	|
+		<OUT>
+		t = GetPath()
+		{target = t.image;}
+		|
+		<SCRIPT>
+		t = GetPath()
+		{script = t.image;}
+		|
+		<PARAM>
+		t = GetPath()
+		{params.add(t.image);}
+		|
+		<PARAM_FILE>
+		t = GetPath()
+		{files.add(t.image);}
+	)*
+	(
+		t = <IDENTIFIER>
+		{alias = t.image;}
+	)?
+	{processExplain(alias, script, isVerbose, format, target, params, files);}
+}
+
+void Script() throws IOException:
+{
+    Token t;
+    String script;
+    boolean batch = false;
+    ArrayList<String> params;
+    ArrayList<String> files;
+}
+{
+	(
+ 		<RUN>
+		{batch = false;}
+	|
+		<EXEC>
+		{batch = true;}
+	)
+    	{
+		params = new ArrayList<String>(); 
+		files = new ArrayList<String>();
+	}
+	(
+		<PARAM>
+		t = GetPath()
+		{params.add(t.image);}
+	|
+		<PARAM_FILE>
+		t = GetPath()
+		{files.add(t.image);}
+	)*
+	t = GetPath()
+	{script = t.image;}
+	{processScript(script, batch, params, files);}
+}
+
 Token GetPath() :
 {
 	Token t;
@@ -475,7 +569,7 @@
 	(
 	t = <IDENTIFIER>
 	|
-        t = <PATH>
+	t = <PATH>
 	|
 	t = GetReserved()
 	)
@@ -548,6 +642,22 @@
 	t = <SET>
 	|
 	t = <SCRIPT_DONE>
+	|
+	t = <RUN>
+	|
+	t = <EXEC>
+	|
+	t = <OUT>
+	|
+	t = <SCRIPT>
+	|
+	t = <DOT>
+	|
+	t = <BRIEF>
+	|
+	t = <PARAM>
+	|
+	t = <PARAM_FILE>
 	)
 
 	{return t;}

Modified: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestGrunt.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestGrunt.java?rev=747660&r1=747659&r2=747660&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestGrunt.java (original)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestGrunt.java Wed Feb 25 05:18:47 2009
@@ -36,8 +36,14 @@
 
 public class TestGrunt extends TestCase {
     MiniCluster cluster = MiniCluster.buildCluster();
+    private String basedir;
 
     private final Log log = LogFactory.getLog(getClass());
+
+    public TestGrunt(String name) {
+        super(name);
+        basedir = "test/org/apache/pig/test/data";
+    }
     
 /*    @Test 
     public void testCopyFromLocal() throws Throwable {
@@ -305,4 +311,164 @@
     
         grunt.exec();
     }
+
+    @Test
+    public void testRunStatment() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate);" +
+                        " run -param LIMIT=5 -param_file " + basedir +
+                        "/test_broken.ppf " + basedir + "/testsub.pig; explain bar";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExecStatment() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        boolean caught = false;
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate);" +
+                        " exec -param LIMIT=5 -param FUNCTION=COUNT " +
+                        "-param FILE=foo " + basedir + "/testsub.pig; explain bar";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+        
+        try {
+            grunt.exec();
+        } catch (Exception e) {
+            caught = true;
+            assertTrue(e.getMessage().contains("alias bar"));
+        }
+        assertTrue(caught);
+    }
+
+    @Test
+    public void testRunStatmentNested() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); run "
+            +basedir+"/testsubnested_run.pig; explain bar";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExecStatmentNested() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        boolean caught = false;
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); exec "
+            +basedir+"/testsubnested_exec.pig; explain bar";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+        
+        try {
+            grunt.exec();
+        } catch (Exception e) {
+            caught = true;
+            assertTrue(e.getMessage().contains("alias bar"));
+        }
+        assertTrue(caught);
+    }
+    
+    @Test
+    public void testExplainEmpty() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); run "
+            +basedir+"/testsubnested_run.pig; explain";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExplainScript() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); explain -script "
+            +basedir+"/testsubnested_run.pig;";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExplainBrief() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); explain -brief -script "
+            +basedir+"/testsubnested_run.pig;";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExplainDot() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); explain -dot -script "
+            +basedir+"/testsubnested_run.pig;";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
+
+    @Test
+    public void testExplainOut() throws Throwable {
+        PigServer server = new PigServer("MAPREDUCE");
+        PigContext context = server.getPigContext();
+        
+        String strCmd = "a = load 'foo' as (foo, fast, regenerate); explain -out /tmp -script "
+            +basedir+"/testsubnested_run.pig;";
+        
+        ByteArrayInputStream cmd = new ByteArrayInputStream(strCmd.getBytes());
+        InputStreamReader reader = new InputStreamReader(cmd);
+        
+        Grunt grunt = new Grunt(new BufferedReader(reader), context);
+    
+        grunt.exec();
+    }
 }

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQuery.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQuery.java?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQuery.java (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQuery.java Wed Feb 25 05:18:47 2009
@@ -0,0 +1,505 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.test;
+
+import java.io.StringReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.pig.ExecType;
+import org.apache.pig.PigException;
+import org.apache.pig.PigServer;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.backend.executionengine.util.ExecTools;
+import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
+import org.apache.pig.impl.io.FileLocalizer;
+import org.apache.pig.impl.logicalLayer.LogicalPlan;
+import org.apache.pig.impl.plan.Operator;
+import org.apache.pig.impl.plan.OperatorPlan;
+import org.apache.pig.tools.grunt.GruntParser;
+import org.apache.pig.tools.grunt.Utils;
+import org.apache.pig.tools.pigscript.parser.ParseException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestMultiQuery extends TestCase {
+
+    private static final MiniCluster cluster = MiniCluster.buildCluster();
+
+    private PigServer myPig;
+
+    @Before
+    public void setUp() throws Exception {
+        myPig = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        myPig = null;
+    }
+
+    @Test
+    public void testMultiQueryWithTwoStores() {
+
+        System.out.println("===== test multi-query with 2 stores =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = group b by gid;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+
+            LogicalPlan lp = checkLogicalPlan(1, 2, 9);
+
+            PhysicalPlan pp = checkPhysicalPlan(lp, 1, 2, 11);
+
+            checkMRPlan(pp, 1, 2, 3);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoStores2() {
+
+        System.out.println("===== test multi-query with 2 stores (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = group b by gid;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithThreeStores() {
+
+        System.out.println("===== test multi-query with 3 stores =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+            myPig.registerQuery("d = filter c by uid > 1500;");
+            myPig.registerQuery("store d into '/tmp/output3';");
+
+            LogicalPlan lp = checkLogicalPlan(1, 3, 14);
+
+            PhysicalPlan pp = checkPhysicalPlan(lp, 1, 3, 14);
+
+            checkMRPlan(pp, 1, 3, 5);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithThreeStores2() {
+
+        System.out.println("===== test multi-query with 3 stores (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+            myPig.registerQuery("d = filter c by uid > 1500;");
+            myPig.registerQuery("store d into '/tmp/output3';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoLoads() {
+
+        System.out.println("===== test multi-query with two loads =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = load 'file:test/org/apache/pig/test/data/passwd2' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("c = filter a by uid > 500;");
+            myPig.registerQuery("d = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output1';");
+            myPig.registerQuery("store d into '/tmp/output2';");
+            myPig.registerQuery("e = cogroup c by uid, d by uid;");
+            myPig.registerQuery("store e into '/tmp/output3';");
+
+            LogicalPlan lp = checkLogicalPlan(2, 3, 16);
+
+            PhysicalPlan pp = checkPhysicalPlan(lp, 2, 3, 19);
+
+            checkMRPlan(pp, 2, 3, 5);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoLoads2() {
+
+        System.out.println("===== test multi-query with two loads (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = load 'file:test/org/apache/pig/test/data/passwd2' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("c = filter a by uid > 500;");
+            myPig.registerQuery("d = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output1';");
+            myPig.registerQuery("store d into '/tmp/output2';");
+            myPig.registerQuery("e = cogroup c by uid, d by uid;");
+            myPig.registerQuery("store e into '/tmp/output3';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithNoStore() {
+
+        System.out.println("===== test multi-query with no store =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("group b by gid;");
+
+            LogicalPlan lp = checkLogicalPlan(0, 0, 0);
+
+            PhysicalPlan pp = checkPhysicalPlan(lp, 0, 0, 0);
+
+            //checkMRPlan(pp, 1, 1, 1);
+
+            //Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithNoStore2() {
+
+        System.out.println("===== test multi-query with no store (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("group b by gid;");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+
+
+    @Test
+    public void testMultiQueryWithExplain() {
+
+        System.out.println("===== test multi-query with explain =====");
+
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "explain b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithDump() {
+
+        System.out.println("===== test multi-query with dump =====");
+
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "dump b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithDescribe() {
+
+        System.out.println("===== test multi-query with describe =====");
+
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "describe b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithIllustrate() {
+
+        System.out.println("===== test multi-query with illustrate =====");
+
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "illustrate b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    // --------------------------------------------------------------------------
+    // Helper methods
+
+    private <T extends OperatorPlan<? extends Operator<?>>> 
+    void showPlanOperators(T p) {
+        System.out.println("Operators:");
+
+        ArrayList<Operator<?>> ops = new ArrayList<Operator<?>>(p.getKeys()
+                .values());
+        Collections.sort(ops);
+        for (Operator<?> op : ops) {
+            System.out.println("    op: " + op.name());
+        }
+        System.out.println();
+    }
+
+    private LogicalPlan checkLogicalPlan(int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException,
+            ParseException {
+
+        System.out.println("===== check logical plan =====");
+        
+        LogicalPlan lp = null;
+
+        try {
+            java.lang.reflect.Method compileLp = myPig.getClass()
+                    .getDeclaredMethod("compileLp",
+                            new Class[] { String.class });
+
+            compileLp.setAccessible(true);
+
+            lp = (LogicalPlan) compileLp.invoke(myPig, new Object[] { null });
+
+            Assert.assertNotNull(lp);
+
+        } catch (Exception e) {
+            PigException pe = Utils.getPigException(e);
+            if (pe != null) {
+                throw pe;
+            } else {
+                e.printStackTrace();
+                Assert.fail();
+            }
+        }
+
+        Assert.assertEquals(expectedRoots, lp.getRoots().size());
+        Assert.assertEquals(expectedLeaves, lp.getLeaves().size());
+        Assert.assertEquals(expectedSize, lp.size());
+
+        showPlanOperators(lp);
+
+        return lp;
+    }
+
+    private PhysicalPlan checkPhysicalPlan(LogicalPlan lp, int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException {
+
+        System.out.println("===== check physical plan =====");
+
+        PhysicalPlan pp = myPig.getPigContext().getExecutionEngine().compile(
+                lp, null);
+
+        Assert.assertEquals(expectedRoots, pp.getRoots().size());
+        Assert.assertEquals(expectedLeaves, pp.getLeaves().size());
+        Assert.assertEquals(expectedSize, pp.size());
+
+        showPlanOperators(pp);
+
+        return pp;
+    }
+
+    private MROperPlan checkMRPlan(PhysicalPlan pp, int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException {
+
+        System.out.println("===== check map-reduce plan =====");
+
+        ExecTools.checkLeafIsStore(pp, myPig.getPigContext());
+        
+        MRCompiler mrcomp = new MRCompiler(pp, myPig.getPigContext());
+        MROperPlan mrp = mrcomp.compile();
+
+        Assert.assertEquals(expectedRoots, mrp.getRoots().size());
+        Assert.assertEquals(expectedLeaves, mrp.getLeaves().size());
+        Assert.assertEquals(expectedSize, mrp.size());
+
+        showPlanOperators(mrp);
+
+        return mrp;
+    }
+
+    private boolean executePlan(PhysicalPlan pp) throws IOException {
+        FileLocalizer.clearDeleteOnFail();
+        ExecJob job = myPig.getPigContext().getExecutionEngine().execute(pp, "execute");
+        boolean failed = (job.getStatus() == ExecJob.JOB_STATUS.FAILED);
+        if (failed) {
+            FileLocalizer.triggerDeleteOnFail();
+        }
+        return !failed;
+    }
+
+    private void deleteOutputFiles() {
+        try {
+            FileLocalizer.delete("/tmp/output1", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output2", myPig.getPigContext());
+            FileLocalizer.delete("/tmp/output3", myPig.getPigContext());
+        } catch (IOException e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+
+}

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQueryLocal.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQueryLocal.java?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQueryLocal.java (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestMultiQueryLocal.java Wed Feb 25 05:18:47 2009
@@ -0,0 +1,510 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.pig.test;
+
+import java.io.StringReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import junit.framework.Assert;
+import junit.framework.TestCase;
+
+import org.apache.pig.ExecType;
+import org.apache.pig.PigException;
+import org.apache.pig.PigServer;
+import org.apache.pig.backend.executionengine.ExecJob;
+import org.apache.pig.backend.executionengine.util.ExecTools;
+import org.apache.pig.backend.hadoop.executionengine.HExecutionEngine;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher;
+import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
+import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
+import org.apache.pig.impl.io.FileLocalizer;
+import org.apache.pig.impl.logicalLayer.LogicalPlan;
+import org.apache.pig.impl.plan.Operator;
+import org.apache.pig.impl.plan.OperatorPlan;
+import org.apache.pig.tools.grunt.GruntParser;
+import org.apache.pig.tools.grunt.Utils;
+import org.apache.pig.tools.pigscript.parser.ParseException;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestMultiQueryLocal extends TestCase {
+
+    private PigServer myPig;
+
+    @Before
+    public void setUp() throws Exception {
+        myPig = new PigServer("local");
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        myPig = null;
+    }
+
+    @Test
+    public void testMultiQueryWithTwoStores() {
+
+        System.out.println("===== test multi-query with 2 stores =====");
+
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = group b by gid;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+
+            LogicalPlan lp = checkLogicalPlan(1, 2, 9);
+
+            // XXX Physical plan has one less node in the local case
+            PhysicalPlan pp = checkPhysicalPlan(lp, 1, 2, 10);
+
+            // XXX MR plan doesn't seem to work in the local case
+            checkMRPlan(pp, 2, 2, 2);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoStores2() {
+
+        System.out.println("===== test multi-query with 2 stores (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = group b by gid;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithThreeStores() {
+
+        System.out.println("===== test multi-query with 3 stores =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+            myPig.registerQuery("d = filter c by uid > 1500;");
+            myPig.registerQuery("store d into '/tmp/output3';");
+
+            LogicalPlan lp = checkLogicalPlan(1, 3, 14);
+
+            PhysicalPlan pp = checkPhysicalPlan(lp, 1, 3, 14);
+
+            // XXX MR plan doesn't seem to work in the local case
+            checkMRPlan(pp, 3, 3, 3);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithThreeStores2() {
+
+        System.out.println("===== test multi-query with 3 stores (2) =====");
+
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("store b into '/tmp/output1';");
+            myPig.registerQuery("c = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output2';");
+            myPig.registerQuery("d = filter c by uid > 1500;");
+            myPig.registerQuery("store d into '/tmp/output3';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoLoads() {
+
+        System.out.println("===== test multi-query with two loads =====");
+
+        // Two independent load/filter branches, each stored, then joined by a
+        // cogroup whose result is also stored -- exercises merging of plans
+        // that share sources. Asserts the expected shape of the logical,
+        // physical and map-reduce plans, then executes the physical plan.
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = load 'file:test/org/apache/pig/test/data/passwd2' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("c = filter a by uid > 500;");
+            myPig.registerQuery("d = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output1';");
+            myPig.registerQuery("store d into '/tmp/output2';");
+            myPig.registerQuery("e = cogroup c by uid, d by uid;");
+            myPig.registerQuery("store e into '/tmp/output3';");
+
+            LogicalPlan lp = checkLogicalPlan(2, 3, 16);
+
+            // XXX the total number of ops is one less in the local case
+            PhysicalPlan pp = checkPhysicalPlan(lp, 2, 3, 18);
+
+            // XXX MR plan doesn't seem to work in the local case
+            checkMRPlan(pp, 4, 4, 4);
+
+            Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithTwoLoads2() {
+
+        System.out.println("===== test multi-query with two loads (2) =====");
+
+        // End-to-end variant of testMultiQueryWithTwoLoads: same two-load,
+        // three-store script, but driven through executeBatch() with no
+        // plan-shape assertions.
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = load 'file:test/org/apache/pig/test/data/passwd2' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("c = filter a by uid > 500;");
+            myPig.registerQuery("d = filter b by uid > 1000;");
+            myPig.registerQuery("store c into '/tmp/output1';");
+            myPig.registerQuery("store d into '/tmp/output2';");
+            myPig.registerQuery("e = cogroup c by uid, d by uid;");
+            myPig.registerQuery("store e into '/tmp/output3';");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithNoStore() {
+
+        System.out.println("===== test multi-query with no store =====");
+
+        // A batch with no store (only a trailing 'group' statement) is
+        // expected to compile to empty logical and physical plans
+        // (0 roots / 0 leaves / 0 ops). No cleanup needed since nothing
+        // is written.
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("group b by gid;");
+
+            LogicalPlan lp = checkLogicalPlan(0, 0, 0);
+
+            // XXX Physical plan has one less node in the local case
+            PhysicalPlan pp = checkPhysicalPlan(lp, 0, 0, 0);
+
+            //checkMRPlan(pp, 0, 0, 0);
+
+            //Assert.assertTrue(executePlan(pp));
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithNoStore2() {
+
+        System.out.println("===== test multi-query with no store (2) =====");
+
+        // executeBatch() on a batch containing no store must complete
+        // without throwing (effectively a no-op execution).
+        try {
+            myPig.setBatchOn();
+
+            myPig.registerQuery("a = load 'file:test/org/apache/pig/test/data/passwd' " +
+                                "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);");
+            myPig.registerQuery("b = filter a by uid > 500;");
+            myPig.registerQuery("group b by gid;");
+
+            myPig.executeBatch();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        }
+    }
+
+
+    @Test
+    public void testMultiQueryWithExplain() {
+
+        System.out.println("===== test multi-query with explain =====");
+
+        // A diagnostic 'explain' interleaved between a filter and its store
+        // must not break the rest of the script when parsed by GruntParser
+        // in non-interactive (script) mode.
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "explain b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithDump() {
+
+        System.out.println("===== test multi-query with dump =====");
+
+        // Same shape as the explain test, but with 'dump' as the interleaved
+        // diagnostic command: the script must still parse and run to the
+        // final store without error.
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "dump b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithDescribe() {
+
+        System.out.println("===== test multi-query with describe =====");
+
+        // 'describe' interleaved in a batch script must not disturb the
+        // statements that follow it.
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "describe b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    @Test
+    public void testMultiQueryWithIllustrate() {
+
+        System.out.println("===== test multi-query with illustrate =====");
+
+        // 'illustrate' interleaved in a batch script must not disturb the
+        // statements that follow it.
+        try {
+            String script = "a = load 'file:test/org/apache/pig/test/data/passwd' "
+                          + "using PigStorage(':') as (uname:chararray, passwd:chararray, uid:int,gid:int);"
+                          + "b = filter a by uid > 500;"
+                          + "illustrate b;"
+                          + "store b into '/tmp/output1';\n";
+            
+            GruntParser parser = new GruntParser(new StringReader(script));
+            parser.setInteractive(false);
+            parser.setParams(myPig);
+            parser.parseStopOnError();
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            Assert.fail();
+        } finally {
+            deleteOutputFiles();
+        }
+    }
+
+    // --------------------------------------------------------------------------
+    // Helper methods
+
+    /** Prints the plan's operators in sorted order, as a debugging aid. */
+    private <T extends OperatorPlan<? extends Operator<?>>> 
+    void showPlanOperators(T p) {
+        System.out.println("Operators:");
+
+        ArrayList<Operator<?>> sortedOps =
+                new ArrayList<Operator<?>>(p.getKeys().values());
+        Collections.sort(sortedOps);
+        for (int i = 0; i < sortedOps.size(); i++) {
+            System.out.println("    op: " + sortedOps.get(i).name());
+        }
+        System.out.println();
+    }
+
+    /**
+     * Compiles the queries registered so far into a LogicalPlan and asserts
+     * the expected number of roots, leaves and total operators.
+     *
+     * PigServer.compileLp(String) is private, so it is invoked reflectively.
+     * A PigException wrapped inside the reflective exception is unwrapped
+     * and rethrown so the test reports the real compilation error.
+     */
+    private LogicalPlan checkLogicalPlan(int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException,
+            ParseException {
+
+        System.out.println("===== check logical plan =====");
+    
+        LogicalPlan lp = null;
+
+        try {
+            java.lang.reflect.Method compileLp = myPig.getClass()
+                    .getDeclaredMethod("compileLp",
+                            new Class[] { String.class });
+
+            // compileLp is not public API; make it callable from the test.
+            compileLp.setAccessible(true);
+
+            // Invoked with a null alias -- presumably compiles the whole
+            // registered batch; confirm against PigServer.compileLp.
+            lp = (LogicalPlan) compileLp.invoke(myPig, new Object[] { null });
+
+            Assert.assertNotNull(lp);
+
+        } catch (Exception e) {
+            PigException pe = Utils.getPigException(e);
+            if (pe != null) {
+                // Surface the underlying Pig error instead of the
+                // reflection wrapper.
+                throw pe;
+            } else {
+                e.printStackTrace();
+                Assert.fail();
+            }
+        }
+
+        Assert.assertEquals(expectedRoots, lp.getRoots().size());
+        Assert.assertEquals(expectedLeaves, lp.getLeaves().size());
+        Assert.assertEquals(expectedSize, lp.size());
+
+        showPlanOperators(lp);
+
+        return lp;
+    }
+
+    /**
+     * Translates the given logical plan into a physical plan and asserts
+     * the expected number of roots, leaves and total operators.
+     */
+    private PhysicalPlan checkPhysicalPlan(LogicalPlan lp, int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException {
+
+        System.out.println("===== check physical plan =====");
+
+        PhysicalPlan physicalPlan =
+                myPig.getPigContext().getExecutionEngine().compile(lp, null);
+
+        Assert.assertEquals(expectedRoots, physicalPlan.getRoots().size());
+        Assert.assertEquals(expectedLeaves, physicalPlan.getLeaves().size());
+        Assert.assertEquals(expectedSize, physicalPlan.size());
+
+        showPlanOperators(physicalPlan);
+
+        return physicalPlan;
+    }
+
+    /**
+     * Compiles the physical plan down to a map-reduce operator plan and
+     * asserts the expected number of roots, leaves and total operators.
+     */
+    private MROperPlan checkMRPlan(PhysicalPlan pp, int expectedRoots,
+            int expectedLeaves, int expectedSize) throws IOException {
+
+        System.out.println("===== check map-reduce plan =====");
+
+        // Ensure the plan's leaf is a store before handing it to the
+        // MR compiler.
+        ExecTools.checkLeafIsStore(pp, myPig.getPigContext());
+
+        MROperPlan mrPlan = new MRCompiler(pp, myPig.getPigContext()).compile();
+
+        Assert.assertEquals(expectedRoots, mrPlan.getRoots().size());
+        Assert.assertEquals(expectedLeaves, mrPlan.getLeaves().size());
+        Assert.assertEquals(expectedSize, mrPlan.size());
+
+        showPlanOperators(mrPlan);
+
+        return mrPlan;
+    }
+
+    /**
+     * Runs the physical plan and returns true iff the job did not fail.
+     * On failure, triggers the deferred deletion of partial outputs.
+     */
+    private boolean executePlan(PhysicalPlan pp) throws IOException {
+        FileLocalizer.clearDeleteOnFail();
+        ExecJob job = myPig.getPigContext().getExecutionEngine().execute(pp, "execute");
+        if (job.getStatus() == ExecJob.JOB_STATUS.FAILED) {
+            FileLocalizer.triggerDeleteOnFail();
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Removes the three test output locations. Each path is deleted
+     * independently so that an IOException on one (e.g. /tmp/output1)
+     * no longer aborts cleanup of the remaining outputs -- the original
+     * single try-block stopped at the first failure, leaking files into
+     * later tests. The test still fails if any deletion threw.
+     */
+    private void deleteOutputFiles() {
+        boolean failed = false;
+        for (String path : new String[] { "/tmp/output1", "/tmp/output2",
+                "/tmp/output3" }) {
+            try {
+                FileLocalizer.delete(path, myPig.getPigContext());
+            } catch (IOException e) {
+                e.printStackTrace();
+                failed = true;
+            }
+        }
+        if (failed) {
+            Assert.fail();
+        }
+    }
+
+}

Modified: hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestPigScriptParser.java
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestPigScriptParser.java?rev=747660&r1=747659&r2=747660&view=diff
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestPigScriptParser.java (original)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/TestPigScriptParser.java Wed Feb 25 05:18:47 2009
@@ -52,6 +52,7 @@
         
         // Start the real parsing job
         {
+
         	// Initial statement
         	String query = String.format("A = LOAD '%s' ;", Util.encodeEscape(tempFile)) ;
         	ByteArrayInputStream in = new ByteArrayInputStream(query.getBytes()); 

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd Wed Feb 25 05:18:47 2009
@@ -0,0 +1,14 @@
+root:x:0:0:root:/root:/bin/bash
+bin:x:1:1:bin:/bin:/sbin/nologin
+daemon:x:2:2:daemon:/sbin:/sbin/nologin
+adm:x:3:4:adm:/var/adm:/sbin/nologin
+lp:x:4:7:lp:/var/spool/lpd:/sbin/nologin
+sync:x:5:0:sync:/sbin:/bin/sync
+shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown
+halt:x:7:0:halt:/sbin:/sbin/halt
+mail:x:8:12:mail:/var/spool/mail:/sbin/nologin
+news:x:9:13:news:/etc/news:
+uucp:x:10:14:uucp:/var/spool/uucp:/sbin/nologin
+operator:x:11:0:operator:/root:/sbin/nologin
+games:x:12:100:games:/usr/games:/sbin/nologin
+gopher:x:13:30:gopher:/var/gopher:/sbin/nologin

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd2
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd2?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd2 (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/passwd2 Wed Feb 25 05:18:47 2009
@@ -0,0 +1,14 @@
+root:x:0:0:root:/root:/bin/bash
+bin:x:1:1:bin:/bin:/sbin/nologin
+daemon:x:2:2:daemon:/sbin:/sbin/nologin
+adm:x:3:4:adm:/var/adm:/sbin/nologin
+lp:x:4:7:lp:/var/spool/lpd:/sbin/nologin
+sync:x:5:0:sync:/sbin:/bin/sync
+shutdown:x:6:0:shutdown:/sbin:/sbin/shutdown
+halt:x:7:0:halt:/sbin:/sbin/halt
+mail:x:8:12:mail:/var/spool/mail:/sbin/nologin
+news:x:9:13:news:/etc/news:
+uucp:x:10:14:uucp:/var/spool/uucp:/sbin/nologin
+operator:x:11:0:operator:/root:/sbin/nologin
+games:x:12:100:games:/usr/games:/sbin/nologin
+gopher:x:13:30:gopher:/var/gopher:/sbin/nologin

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test.ppf
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test.ppf?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test.ppf (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test.ppf Wed Feb 25 05:18:47 2009
@@ -0,0 +1,3 @@
+LIMIT = 5
+FILE = foo
+FUNCTION = COUNT

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test_broken.ppf
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test_broken.ppf?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test_broken.ppf (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/test_broken.ppf Wed Feb 25 05:18:47 2009
@@ -0,0 +1,3 @@
+LIMIT = WONTWORK
+FILE = foo
+FUNCTION = COUNT

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsub.pig
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsub.pig?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsub.pig (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsub.pig Wed Feb 25 05:18:47 2009
@@ -0,0 +1,4 @@
+foo = load '$FILE' as (foo, fast, regenerate);
+bar = limit foo $LIMIT;
+baz = foreach bar generate $FUNCTION($0);
+explain baz;

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_exec.pig
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_exec.pig?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_exec.pig (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_exec.pig Wed Feb 25 05:18:47 2009
@@ -0,0 +1 @@
+exec -param_file test/org/apache/pig/test/data/test.ppf test/org/apache/pig/test/data/testsub.pig;

Added: hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_run.pig
URL: http://svn.apache.org/viewvc/hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_run.pig?rev=747660&view=auto
==============================================================================
--- hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_run.pig (added)
+++ hadoop/pig/branches/multiquery/test/org/apache/pig/test/data/testsubnested_run.pig Wed Feb 25 05:18:47 2009
@@ -0,0 +1 @@
+run -param LIMIT=5 -param FILE=foo -param FUNCTION=COUNT test/org/apache/pig/test/data/testsub.pig;



Mime
View raw message