tajo-commits mailing list archives

From hj...@apache.org
Subject [06/35] TAJO-1125: Separate logical plan and optimizer into a maven module.
Date Sun, 26 Oct 2014 19:27:12 GMT
http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/ExplainLogicalPlanVisitor.java
----------------------------------------------------------------------
diff --git a/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/ExplainLogicalPlanVisitor.java b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/ExplainLogicalPlanVisitor.java
new file mode 100644
index 0000000..7065295
--- /dev/null
+++ b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/ExplainLogicalPlanVisitor.java
@@ -0,0 +1,250 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.plan.visitor;
+
+import org.apache.tajo.annotation.Nullable;
+import org.apache.tajo.plan.LogicalPlan;
+import org.apache.tajo.plan.PlanString;
+import org.apache.tajo.plan.PlanningException;
+import org.apache.tajo.plan.logical.*;
+
+import java.util.Stack;
+
+/**
+ * Builds a list of plan strings, one per logical node, used to render EXPLAIN output.
+ */
+public class ExplainLogicalPlanVisitor extends BasicLogicalPlanVisitor<ExplainLogicalPlanVisitor.Context, LogicalNode> {
+
+  public static class Context {
+    public int maxDepth  = -1;
+    public int depth = 0;
+    public Stack<DepthString> explains = new Stack<DepthString>();
+
+    public void add(int depth, PlanString planString) {
+      maxDepth = Math.max(maxDepth, depth);
+      explains.push(new DepthString(depth, planString));
+    }
+
+    public int getMaxDepth() {
+      return this.maxDepth;
+    }
+
+    public Stack<DepthString> getExplains() {
+      return explains;
+    }
+  }
+
+  public static class DepthString {
+    private int depth;
+    private PlanString planStr;
+
+    DepthString(int depth, PlanString planStr) {
+      this.depth = depth;
+      this.planStr = planStr;
+    }
+
+    public int getDepth() {
+      return depth;
+    }
+
+    public PlanString getPlanString() {
+      return planStr;
+    }
+  }
+
+  public Context getBlockPlanStrings(@Nullable LogicalPlan plan, LogicalNode node) throws PlanningException {
+    Stack<LogicalNode> stack = new Stack<LogicalNode>();
+    Context explainContext = new Context();
+    visit(explainContext, plan, null, node, stack);
+    return explainContext;
+  }
+
+  @Override
+  public LogicalNode visitRoot(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, LogicalRootNode node, Stack<LogicalNode> stack)
+      throws PlanningException {
+    return visit(context, plan, block, node.getChild(), stack);
+  }
+
+  @Override
+  public LogicalNode visitProjection(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                     ProjectionNode node, Stack<LogicalNode> stack)
+      throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitLimit(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                LimitNode node, Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitSort(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, SortNode node,
+                               Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  public LogicalNode visitHaving(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, HavingNode node,
+                                  Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitGroupBy(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, GroupbyNode node,
+                                  Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitWindowAgg(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, WindowAggNode node,
+                                    Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  public LogicalNode visitDistinct(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, DistinctGroupbyNode node,
+                                  Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  private LogicalNode visitUnaryNode(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                     UnaryNode node, Stack<LogicalNode> stack) throws PlanningException {
+    context.depth++;
+    stack.push(node);
+    visit(context, plan, block, node.getChild(), stack);
+    context.depth--;
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  private LogicalNode visitBinaryNode(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, BinaryNode node,
+                                      Stack<LogicalNode> stack)
+      throws PlanningException {
+    context.depth++;
+    stack.push(node);
+    visit(context, plan, block, node.getLeftChild(), stack);
+    visit(context, plan, block, node.getRightChild(), stack);
+    stack.pop();
+    context.depth--;
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  @Override
+  public LogicalNode visitFilter(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, SelectionNode node,
+                                 Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitJoin(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode node,
+                               Stack<LogicalNode> stack) throws PlanningException {
+    return visitBinaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitUnion(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, UnionNode node,
+                                Stack<LogicalNode> stack) throws PlanningException {
+    return visitBinaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitExcept(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, ExceptNode node,
+                                 Stack<LogicalNode> stack) throws PlanningException {
+    return visitBinaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitIntersect(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, IntersectNode node,
+                                    Stack<LogicalNode> stack) throws PlanningException {
+    return visitBinaryNode(context, plan, block, node, stack);
+  }
+
+  @Override
+  public LogicalNode visitTableSubQuery(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                        TableSubQueryNode node, Stack<LogicalNode> stack) throws PlanningException {
+    context.depth++;
+    stack.push(node);
+    visit(context, plan, block, node.getSubQuery(), new Stack<LogicalNode>());
+    stack.pop();
+    context.depth--;
+    context.add(context.depth, node.getPlanString());
+
+    return node;
+  }
+
+  @Override
+  public LogicalNode visitScan(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, ScanNode node,
+                               Stack<LogicalNode> stack) throws PlanningException {
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  @Override
+  public LogicalNode visitPartitionedTableScan(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                          PartitionedTableScanNode node, Stack<LogicalNode> stack)
+      throws PlanningException {
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  @Override
+  public LogicalNode visitStoreTable(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                     StoreTableNode node, Stack<LogicalNode> stack) throws PlanningException {
+    return visitUnaryNode(context, plan, block, node, stack);
+  }
+
+  public LogicalNode visitCreateDatabase(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                         CreateDatabaseNode node, Stack<LogicalNode> stack) throws PlanningException {
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  public LogicalNode visitDropDatabase(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                         DropDatabaseNode node, Stack<LogicalNode> stack) throws PlanningException {
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  @Override
+  public LogicalNode visitInsert(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, InsertNode node,
+                                 Stack<LogicalNode> stack) throws PlanningException {
+    context.depth++;
+    stack.push(node);
+    super.visitInsert(context, plan, block, node, stack);
+    stack.pop();
+    context.depth--;
+    context.add(context.depth, node.getPlanString());
+    return node;
+  }
+
+  public static String printDepthString(int maxDepth, DepthString planStr) {
+    StringBuilder output = new StringBuilder();
+    String pad = new String(new char[planStr.getDepth() * 3]).replace('\0', ' ');
+    output.append(pad + planStr.getPlanString().getTitle()).append("\n");
+
+    for (String str : planStr.getPlanString().getExplanations()) {
+      output.append(pad).append("  => ").append(str).append("\n");
+    }
+
+    for (String str : planStr.getPlanString().getDetails()) {
+      output.append(pad).append("  => ").append(str).append("\n");
+    }
+    return output.toString();
+  }
+}
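
For orientation, a minimal usage sketch (not part of the commit) of how the visitor above might be driven to produce an EXPLAIN-style string; the helper method name `explain` is hypothetical, and it assumes a LogicalPlan and its root LogicalNode have already been built:

// Illustrative sketch only; uses nothing beyond the API shown in the diff above.
static String explain(LogicalPlan plan, LogicalNode root) throws PlanningException {
  ExplainLogicalPlanVisitor explainVisitor = new ExplainLogicalPlanVisitor();
  ExplainLogicalPlanVisitor.Context context = explainVisitor.getBlockPlanStrings(plan, root);

  StringBuilder output = new StringBuilder();
  // Plan strings are pushed bottom-up during the visit, so popping the stack emits the root first.
  while (!context.getExplains().empty()) {
    ExplainLogicalPlanVisitor.DepthString depthString = context.getExplains().pop();
    output.append(ExplainLogicalPlanVisitor.printDepthString(context.getMaxDepth(), depthString));
  }
  return output.toString();
}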

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/LogicalPlanVisitor.java
----------------------------------------------------------------------
diff --git a/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/LogicalPlanVisitor.java b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/LogicalPlanVisitor.java
new file mode 100644
index 0000000..29807f1
--- /dev/null
+++ b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/LogicalPlanVisitor.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.plan.visitor;
+
+import org.apache.tajo.plan.LogicalPlan;
+import org.apache.tajo.plan.PlanningException;
+import org.apache.tajo.plan.logical.*;
+
+import java.util.Stack;
+
+public interface LogicalPlanVisitor<CONTEXT, RESULT> {
+  RESULT visitRoot(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, LogicalRootNode node,
+                   Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitProjection(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, ProjectionNode node,
+                         Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitLimit(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, LimitNode node,
+                    Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitSort(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, SortNode node,
+                   Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitHaving(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, HavingNode node,
+                      Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitGroupBy(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, GroupbyNode node,
+                      Stack<LogicalNode> stack) throws PlanningException;
+  RESULT visitWindowAgg(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, WindowAggNode node,
+                      Stack<LogicalNode> stack) throws PlanningException;
+  RESULT visitDistinct(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, DistinctGroupbyNode node,
+                                Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitFilter(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, SelectionNode node,
+                     Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitJoin(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode node,
+                   Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitUnion(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, UnionNode node,
+                    Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitExcept(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, ExceptNode node,
+                     Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitIntersect(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, IntersectNode node,
+                        Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitTableSubQuery(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, TableSubQueryNode node,
+                            Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitScan(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, ScanNode node,
+                   Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitPartitionedTableScan(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                   PartitionedTableScanNode node, Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitStoreTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, StoreTableNode node,
+                         Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitInsert(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, InsertNode node,
+                     Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitCreateDatabase(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, CreateDatabaseNode node,
+                          Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitDropDatabase(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, DropDatabaseNode node,
+                             Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitCreateTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, CreateTableNode node,
+                          Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitDropTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, DropTableNode node,
+                        Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitAlterTablespace(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, AlterTablespaceNode node,
+                          Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitAlterTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, AlterTableNode node,
+                         Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitTruncateTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, TruncateTableNode node,
+                         Stack<LogicalNode> stack) throws PlanningException;
+}
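
Not part of the diff, but for context: concrete visitors in this module typically extend BasicLogicalPlanVisitor (which implements this interface and supplies the traversal, as ExplainLogicalPlanVisitor above does) rather than implement the interface directly. A hypothetical sketch, with the ScanCounter name and AtomicInteger context chosen purely for illustration:

import java.util.Stack;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.tajo.plan.LogicalPlan;
import org.apache.tajo.plan.PlanningException;
import org.apache.tajo.plan.logical.LogicalNode;
import org.apache.tajo.plan.logical.ScanNode;
import org.apache.tajo.plan.visitor.BasicLogicalPlanVisitor;

// Illustrative only: counts ScanNodes by overriding a single callback and
// letting the inherited methods handle the rest of the traversal.
public class ScanCounter extends BasicLogicalPlanVisitor<AtomicInteger, LogicalNode> {
  @Override
  public LogicalNode visitScan(AtomicInteger count, LogicalPlan plan, LogicalPlan.QueryBlock block,
                               ScanNode node, Stack<LogicalNode> stack) throws PlanningException {
    count.incrementAndGet();  // scan is a leaf node, so there is nothing further to visit
    return node;
  }
}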

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/SimpleAlgebraVisitor.java
----------------------------------------------------------------------
diff --git a/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/SimpleAlgebraVisitor.java b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/SimpleAlgebraVisitor.java
new file mode 100644
index 0000000..ad4ef18
--- /dev/null
+++ b/tajo-plan/src/main/java/org/apache/tajo/plan/visitor/SimpleAlgebraVisitor.java
@@ -0,0 +1,212 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.plan.visitor;
+
+import org.apache.tajo.algebra.*;
+import org.apache.tajo.plan.PlanningException;
+import org.apache.tajo.plan.algebra.BaseAlgebraVisitor;
+
+import java.util.Stack;
+
+/**
+ * <code>SimpleAlgebraVisitor</code> provides simpler and fewer visit methods, which makes building concrete visitor classes easier.
+ */
+public abstract class SimpleAlgebraVisitor<CONTEXT, RESULT> extends BaseAlgebraVisitor<CONTEXT, RESULT> {
+
+  public RESULT visit(CONTEXT ctx, Stack<Expr> stack, Expr expr) throws PlanningException {
+    RESULT result = null;
+    if (expr instanceof UnaryOperator) {
+      preHook(ctx, stack, expr);
+      result = visitUnaryOperator(ctx, stack, (UnaryOperator) expr);
+      postHook(ctx, stack, expr, result);
+    } else if (expr instanceof BinaryOperator) {
+      preHook(ctx, stack, expr);
+      result = visitBinaryOperator(ctx, stack, (BinaryOperator) expr);
+      postHook(ctx, stack, expr, result);
+    } else {
+      result = super.visit(ctx, stack, expr);
+    }
+
+    return result;
+  }
+
+  public RESULT visitUnaryOperator(CONTEXT ctx, Stack<Expr> stack, UnaryOperator expr) throws PlanningException {
+    stack.push(expr);
+    RESULT result = visit(ctx, stack, expr.getChild());
+    stack.pop();
+    return result;
+  }
+
+  public RESULT visitBinaryOperator(CONTEXT ctx, Stack<Expr> stack, BinaryOperator expr) throws PlanningException {
+    stack.push(expr);
+    visit(ctx, stack, expr.getLeft());
+    RESULT result = visit(ctx, stack, expr.getRight());
+    stack.pop();
+    return result;
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Relational Operator Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  @Override
+  public RESULT visitProjection(CONTEXT ctx, Stack<Expr> stack, Projection expr) throws PlanningException {
+    return super.visitProjection(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitLimit(CONTEXT ctx, Stack<Expr> stack, Limit expr) throws PlanningException {
+    return super.visitLimit(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitSort(CONTEXT ctx, Stack<Expr> stack, Sort expr) throws PlanningException {
+    return super.visitSort(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitHaving(CONTEXT ctx, Stack<Expr> stack, Having expr) throws PlanningException {
+    return super.visitHaving(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitGroupBy(CONTEXT ctx, Stack<Expr> stack, Aggregation expr) throws PlanningException {
+    return super.visitGroupBy(ctx, stack, expr);
+  }
+
+  public RESULT visitFilter(CONTEXT ctx, Stack<Expr> stack, Selection expr) throws PlanningException {
+    return super.visitFilter(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitJoin(CONTEXT ctx, Stack<Expr> stack, Join expr) throws PlanningException {
+    return super.visitJoin(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitTableSubQuery(CONTEXT ctx, Stack<Expr> stack, TablePrimarySubQuery expr) throws PlanningException {
+    return super.visitTableSubQuery(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitRelationList(CONTEXT ctx, Stack<Expr> stack, RelationList expr) throws PlanningException {
+    return super.visitRelationList(ctx, stack, expr);
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Data Definition Language Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  @Override
+  public RESULT visitCreateTable(CONTEXT ctx, Stack<Expr> stack, CreateTable expr) throws PlanningException {
+    return super.visitCreateTable(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitDropTable(CONTEXT ctx, Stack<Expr> stack, DropTable expr) throws PlanningException {
+    return super.visitDropTable(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitAlterTable(CONTEXT ctx, Stack<Expr> stack, AlterTable expr) throws PlanningException {
+    return super.visitAlterTable(ctx, stack, expr);
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Insert or Update Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  @Override
+  public RESULT visitInsert(CONTEXT ctx, Stack<Expr> stack, Insert expr) throws PlanningException {
+    return super.visitInsert(ctx, stack, expr);
+  }
+
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Other Predicates Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  @Override
+  public RESULT visitBetween(CONTEXT ctx, Stack<Expr> stack, BetweenPredicate expr) throws PlanningException {
+    return super.visitBetween(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitCaseWhen(CONTEXT ctx, Stack<Expr> stack, CaseWhenPredicate expr) throws PlanningException {
+    return super.visitCaseWhen(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitValueListExpr(CONTEXT ctx, Stack<Expr> stack, ValueListExpr expr) throws PlanningException {
+    return super.visitValueListExpr(ctx, stack, expr);
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Functions and General Set Function Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  @Override
+  public RESULT visitFunction(CONTEXT ctx, Stack<Expr> stack, FunctionExpr expr) throws PlanningException {
+    return super.visitFunction(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitCountRowsFunction(CONTEXT ctx, Stack<Expr> stack, CountRowsFunctionExpr expr)
+      throws PlanningException {
+    return super.visitCountRowsFunction(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitGeneralSetFunction(CONTEXT ctx, Stack<Expr> stack, GeneralSetFunctionExpr expr)
+      throws PlanningException {
+    return super.visitGeneralSetFunction(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitWindowFunction(CONTEXT ctx, Stack<Expr> stack, WindowFunctionExpr expr) throws PlanningException {
+    return super.visitWindowFunction(ctx, stack, expr);
+  }
+
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+  // Literal Section
+  ///////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+  @Override
+  public RESULT visitDataType(CONTEXT ctx, Stack<Expr> stack, DataTypeExpr expr) throws PlanningException {
+    return super.visitDataType(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitLiteral(CONTEXT ctx, Stack<Expr> stack, LiteralValue expr) throws PlanningException {
+    return super.visitLiteral(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitNullLiteral(CONTEXT ctx, Stack<Expr> stack, NullLiteral expr) throws PlanningException {
+    return super.visitNullLiteral(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitTimestampLiteral(CONTEXT ctx, Stack<Expr> stack, TimestampLiteral expr) throws PlanningException {
+    return super.visitTimestampLiteral(ctx, stack, expr);
+  }
+
+  @Override
+  public RESULT visitTimeLiteral(CONTEXT ctx, Stack<Expr> stack, TimeLiteral expr) throws PlanningException {
+    return super.visitTimeLiteral(ctx, stack, expr);
+  }
+}
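
Again as a sketch outside the commit, to illustrate the comment at the top of this class: a hypothetical subclass that gathers column references from an expression tree. ColumnReferenceExpr and its getCanonicalName() accessor in org.apache.tajo.algebra are assumptions, as is the ColumnCollector name:

import java.util.Set;
import java.util.Stack;

import org.apache.tajo.algebra.ColumnReferenceExpr;
import org.apache.tajo.algebra.Expr;
import org.apache.tajo.plan.PlanningException;

// Illustrative only: intercepts every expression before dispatch, records
// column names, and defers the actual traversal to the inherited visit methods.
public class ColumnCollector extends SimpleAlgebraVisitor<Set<String>, Object> {
  @Override
  public Object visit(Set<String> columns, Stack<Expr> stack, Expr expr) throws PlanningException {
    if (expr instanceof ColumnReferenceExpr) {
      columns.add(((ColumnReferenceExpr) expr).getCanonicalName());
    }
    return super.visit(columns, stack, expr);
  }
}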

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/main/proto/Plan.proto
----------------------------------------------------------------------
diff --git a/tajo-plan/src/main/proto/Plan.proto b/tajo-plan/src/main/proto/Plan.proto
new file mode 100644
index 0000000..8639117
--- /dev/null
+++ b/tajo-plan/src/main/proto/Plan.proto
@@ -0,0 +1,220 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.tajo.plan.serder";
+option java_outer_classname = "PlanProto";
+option java_generic_services = false;
+option java_generate_equals_and_hash = true;
+
+import "PrimitiveProtos.proto";
+import "CatalogProtos.proto";
+import "DataTypes.proto";
+
+enum NodeType {
+  BST_INDEX_SCAN = 0;
+  EXCEPT = 1;
+  EXPRS = 2;
+  DISTINCT_GROUP_BY = 3;
+  GROUP_BY = 4;
+  HAVING = 5;
+  JOIN = 6;
+  INSERT = 7;
+  INTERSECT = 8;
+  LIMIT = 9;
+  PARTITIONS_SCAN = 10;
+  PROJECTION = 11;
+  ROOT = 12;
+  SCAN = 13;
+  SELECTION = 14;
+  SORT = 15;
+  STORE = 16;
+  TABLE_SUBQUERY = 17;
+  UNION = 18;
+  WINDOW_AGG = 19;
+
+  CREATE_DATABASE = 20;
+  DROP_DATABASE = 21;
+  CREATE_TABLE = 22;
+  DROP_TABLE = 23;
+  ALTER_TABLESPACE = 24;
+  ALTER_TABLE = 25;
+  TRUNCATE_TABLE = 26;
+}
+
+message LogicalPlan {
+  required KeyValueSetProto adjacentList = 1;
+}
+
+message LogicalNode {
+  required int32 pid = 1;
+  required NodeType type = 2;
+  required SchemaProto in_schema = 3;
+  required SchemaProto out_schema = 4;
+  required NodeSpec spec = 5;
+}
+
+message NodeSpec {
+  optional ScanNode scan = 1;
+}
+
+message ScanNode {
+  required TableDescProto table = 1;
+  optional string alias = 2;
+  required SchemaProto schema = 3;
+}
+
+
+enum EvalType {
+  NOT = 0;
+  AND = 1;
+  OR = 2;
+  EQUAL = 3;
+  IS_NULL = 4;
+  NOT_EQUAL = 5;
+  LTH = 6;
+  LEQ = 7;
+  GTH = 8;
+  GEQ = 9;
+  PLUS = 10;
+  MINUS = 11;
+  MODULAR = 12;
+  MULTIPLY = 13;
+  DIVIDE = 14;
+
+  // Binary Bitwise expressions
+  BIT_AND = 15;
+  BIT_OR = 16;
+  BIT_XOR = 17;
+
+  // Function
+  WINDOW_FUNCTION = 18;
+  AGG_FUNCTION = 19;
+  FUNCTION = 20;
+
+  // String operator or pattern matching predicates
+  LIKE = 21;
+  SIMILAR_TO = 22;
+  REGEX = 23;
+  CONCATENATE = 24;
+
+  // Other predicates
+  BETWEEN = 25;
+  CASE = 26;
+  IF_THEN = 27;
+  IN = 28;
+
+  // Value or Reference
+  SIGNED = 29;
+  CAST = 30;
+  ROW_CONSTANT = 31;
+  FIELD = 32;
+  CONST = 33;
+}
+
+message EvalTree {
+  repeated EvalNode nodes = 1;
+}
+
+message EvalNode {
+  required int32 id = 1;
+  required EvalType type = 2;
+  required DataType data_type = 3;
+
+  optional UnaryEval unary = 4; // NOT
+  optional BinaryEval binary = 5;
+  optional ConstEval const = 6;
+  optional ColumnProto field = 7; // field eval
+  optional FunctionEval function = 8;
+  optional RowConstEval rowConst = 9;
+  optional BetweenEval between = 10;
+  optional CaseWhenEval casewhen = 11;
+  optional IfCondEval ifCond = 12;
+}
+
+message UnaryEval {
+  required int32 child_id = 1;
+  optional DataType castingType = 2;
+  optional bool negative = 3;
+}
+
+message BinaryEval {
+  required int32 lhs_id = 1;
+  required int32 rhs_id = 2;
+  optional bool negative = 3 [default = false];
+}
+
+message BetweenEval {
+  required int32 predicand = 1;
+  required int32 begin = 2;
+  required int32 end = 3;
+  optional bool negative = 4 [default = false];
+  optional bool symmetric = 5 [default = false];
+}
+
+message CaseWhenEval {
+  repeated int32 ifConds = 1;
+  optional int32 else = 2;
+}
+
+message IfCondEval {
+  required int32 condition = 1;
+  required int32 then = 2;
+}
+
+message ConstEval {
+  required Datum value = 1;
+}
+
+message RowConstEval {
+  repeated Datum values = 1;
+}
+
+message FunctionEval {
+  required FunctionDescProto funcion = 1;
+  repeated int32 paramIds = 2;
+}
+
+message Datum {
+  required Type type = 1;
+  optional bool boolean = 2;
+  optional int32 int4 = 3;    // int4, date
+  optional int64 int8 = 4;    // int8, timestamp, and time
+  optional float float4 = 5;
+  optional double float8 = 6;
+  optional string text = 7;
+  optional bytes blob = 8;
+  optional Interval interval = 12;
+}
+
+message Interval {
+  optional int32 month = 1 [default = 0];
+  optional int64 msec = 2;
+}
+
+enum ShuffleType {
+  NONE_SHUFFLE = 0;
+  HASH_SHUFFLE = 1;
+  RANGE_SHUFFLE = 2;
+  SCATTERED_HASH_SHUFFLE = 3;
+}
+
+enum TransmitType {
+  PUSH_TRANSMIT = 0;
+  PULL_TRANSMIT = 1;
+  FILE_WRITE = 2;
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/test/java/org/apache/tajo/plan/TestExprAnnotator.java
----------------------------------------------------------------------
diff --git a/tajo-plan/src/test/java/org/apache/tajo/plan/TestExprAnnotator.java b/tajo-plan/src/test/java/org/apache/tajo/plan/TestExprAnnotator.java
new file mode 100644
index 0000000..cdf4c1b
--- /dev/null
+++ b/tajo-plan/src/test/java/org/apache/tajo/plan/TestExprAnnotator.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.plan;
+
+import org.apache.tajo.catalog.CatalogUtil;
+import org.junit.Test;
+
+import static org.apache.tajo.common.TajoDataTypes.Type;
+import static org.junit.Assert.assertEquals;
+
+public class TestExprAnnotator {
+  @Test
+  public void testGetWidestType() throws Exception {
+    assertEquals(Type.INT1,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataType(Type.INT1)).getType());
+    assertEquals(Type.INT2,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataTypeArray(Type.INT1, Type.INT2)).getType());
+    assertEquals(Type.INT4,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataTypeArray(Type.INT1, Type.INT2, Type.INT4)).getType());
+    assertEquals(Type.INT8,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataTypeArray(Type.INT1, Type.INT2, Type.INT4,
+            Type.INT8)).getType());
+    assertEquals(Type.FLOAT4,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataTypeArray(Type.FLOAT4, Type.INT2, Type.INT4,
+            Type.INT8)).getType());
+    assertEquals(Type.FLOAT8,
+        CatalogUtil.getWidestType(CatalogUtil.newSimpleDataTypeArray(Type.FLOAT4, Type.FLOAT8, Type.INT4,
+            Type.INT8)).getType());
+  }
+}

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-plan/src/test/java/org/apache/tajo/plan/TestLogicalNode.java
----------------------------------------------------------------------
diff --git a/tajo-plan/src/test/java/org/apache/tajo/plan/TestLogicalNode.java b/tajo-plan/src/test/java/org/apache/tajo/plan/TestLogicalNode.java
new file mode 100644
index 0000000..bff7efb
--- /dev/null
+++ b/tajo-plan/src/test/java/org/apache/tajo/plan/TestLogicalNode.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.plan;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.tajo.catalog.CatalogUtil;
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.plan.logical.GroupbyNode;
+import org.apache.tajo.plan.logical.JoinNode;
+import org.apache.tajo.plan.logical.LogicalNode;
+import org.apache.tajo.plan.logical.ScanNode;
+import org.junit.Test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestLogicalNode {
+
+  @Test
+  public void testEquals() {
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    schema.addColumn("age", Type.INT2);
+    GroupbyNode groupbyNode = new GroupbyNode(0);
+    groupbyNode.setGroupingColumns(new Column[]{schema.getColumn(1), schema.getColumn(2)});
+    ScanNode scanNode = new ScanNode(0);
+    scanNode.init(CatalogUtil.newTableDesc("in", schema, CatalogUtil.newTableMeta(StoreType.CSV), new Path("in")));
+
+    GroupbyNode groupbyNode2 = new GroupbyNode(0);
+    groupbyNode2.setGroupingColumns(new Column[]{schema.getColumn(1), schema.getColumn(2)});
+    JoinNode joinNode = new JoinNode(0);
+    ScanNode scanNode2 = new ScanNode(0);
+    scanNode2.init(CatalogUtil.newTableDesc("in2", schema, CatalogUtil.newTableMeta(StoreType.CSV), new Path("in2")));
+
+    groupbyNode.setChild(scanNode);
+    groupbyNode2.setChild(joinNode);
+    joinNode.setLeftChild(scanNode);
+    joinNode.setRightChild(scanNode2);
+
+    assertTrue(groupbyNode.equals(groupbyNode2));
+    assertFalse(groupbyNode.deepEquals(groupbyNode2));
+
+    ScanNode scanNode3 = new ScanNode(0);
+    scanNode3.init(CatalogUtil.newTableDesc("in", schema, CatalogUtil.newTableMeta(StoreType.CSV), new Path("in")));
+    groupbyNode2.setChild(scanNode3);
+
+    assertTrue(groupbyNode.equals(groupbyNode2));
+    assertTrue(groupbyNode.deepEquals(groupbyNode2));
+  }
+}

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-project/pom.xml
----------------------------------------------------------------------
diff --git a/tajo-project/pom.xml b/tajo-project/pom.xml
index 908fd18..97489da 100644
--- a/tajo-project/pom.xml
+++ b/tajo-project/pom.xml
@@ -729,6 +729,11 @@
       </dependency>
       <dependency>
         <groupId>org.apache.tajo</groupId>
+        <artifactId>tajo-plan</artifactId>
+        <version>${tajo.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.tajo</groupId>
         <artifactId>tajo-core</artifactId>
         <version>${tajo.version}</version>
         <type>test-jar</type>

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
deleted file mode 100644
index 4b238f9..0000000
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageConstants.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.storage;
-
-import parquet.hadoop.ParquetWriter;
-import parquet.hadoop.metadata.CompressionCodecName;
-
-public class StorageConstants {
-  // table options
-  public static final String COMPRESSION_CODEC = "compression.codec";
-  public static final String COMPRESSION_TYPE = "compression.type";
-
-  public static final String CSVFILE_DELIMITER = "csvfile.delimiter";
-  public static final String CSVFILE_NULL = "csvfile.null";
-  public static final String CSVFILE_SERDE = "csvfile.serde";
-
-
-  public static final String SEQUENCEFILE_DELIMITER = "sequencefile.delimiter";
-  public static final String SEQUENCEFILE_NULL = "sequencefile.null";
-  public static final String SEQUENCEFILE_SERDE = "sequencefile.serde";
-
-  public static final String RCFILE_NULL = "rcfile.null";
-  public static final String RCFILE_SERDE = "rcfile.serde";
-
-  public static final String DEFAULT_FIELD_DELIMITER = "|";
-  public static final String DEFAULT_BINARY_SERDE = BinarySerializerDeserializer.class.getName();
-  public static final String DEFAULT_TEXT_SERDE = TextSerializerDeserializer.class.getName();
-
-  public static final String PARQUET_DEFAULT_BLOCK_SIZE;
-  public static final String PARQUET_DEFAULT_PAGE_SIZE;
-  public static final String PARQUET_DEFAULT_COMPRESSION_CODEC_NAME;
-  public static final String PARQUET_DEFAULT_IS_DICTIONARY_ENABLED;
-  public static final String PARQUET_DEFAULT_IS_VALIDATION_ENABLED;
-
-  public static final String AVRO_SCHEMA_LITERAL = "avro.schema.literal";
-  public static final String AVRO_SCHEMA_URL = "avro.schema.url";
-
-  static {
-    PARQUET_DEFAULT_BLOCK_SIZE =
-        Integer.toString(ParquetWriter.DEFAULT_BLOCK_SIZE);
-    PARQUET_DEFAULT_PAGE_SIZE =
-        Integer.toString(ParquetWriter.DEFAULT_PAGE_SIZE);
-
-    // When parquet-hadoop 1.3.3 is available, this should be changed to
-    // ParquetWriter.DEFAULT_COMPRESSION_CODEC_NAME.
-    PARQUET_DEFAULT_COMPRESSION_CODEC_NAME =
-        CompressionCodecName.UNCOMPRESSED.name().toLowerCase();
-
-    // When parquet-hadoop 1.3.3 is available, this should be changed to
-    // ParquetWriter.DEFAULT_IS_DICTIONARY_ENABLED.
-    PARQUET_DEFAULT_IS_DICTIONARY_ENABLED = "true";
-
-    // When parquet-hadoop 1.3.3 is available, this should be changed to
-    // ParquetWriter.DEFAULT_IS_VALIDATING_ENABLED.
-    PARQUET_DEFAULT_IS_VALIDATION_ENABLED = "false";
-  }
-}

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
index 07fa16b..1789cc9 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageUtil.java
@@ -111,26 +111,6 @@ public class StorageUtil extends StorageConstants {
     return new Path(parent, sb.toString());
   }
 
-  public static KeyValueSet newPhysicalProperties(CatalogProtos.StoreType type) {
-    KeyValueSet options = new KeyValueSet();
-    if (CatalogProtos.StoreType.CSV == type) {
-      options.set(CSVFILE_DELIMITER, DEFAULT_FIELD_DELIMITER);
-    } else if (CatalogProtos.StoreType.RCFILE == type) {
-      options.set(RCFILE_SERDE, DEFAULT_BINARY_SERDE);
-    } else if (CatalogProtos.StoreType.SEQUENCEFILE == type) {
-      options.set(SEQUENCEFILE_SERDE, DEFAULT_TEXT_SERDE);
-      options.set(SEQUENCEFILE_DELIMITER, DEFAULT_FIELD_DELIMITER);
-    } else if (type == CatalogProtos.StoreType.PARQUET) {
-      options.set(ParquetOutputFormat.BLOCK_SIZE, PARQUET_DEFAULT_BLOCK_SIZE);
-      options.set(ParquetOutputFormat.PAGE_SIZE, PARQUET_DEFAULT_PAGE_SIZE);
-      options.set(ParquetOutputFormat.COMPRESSION, PARQUET_DEFAULT_COMPRESSION_CODEC_NAME);
-      options.set(ParquetOutputFormat.ENABLE_DICTIONARY, PARQUET_DEFAULT_IS_DICTIONARY_ENABLED);
-      options.set(ParquetOutputFormat.VALIDATION, PARQUET_DEFAULT_IS_VALIDATION_ENABLED);
-    }
-
-    return options;
-  }
-
   static final String fileNamePatternV08 = "part-[0-9]*-[0-9]*";
   static final String fileNamePatternV09 = "part-[0-9]*-[0-9]*-[0-9]*";
 

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/main/java/org/apache/tajo/storage/Tuple.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/Tuple.java b/tajo-storage/src/main/java/org/apache/tajo/storage/Tuple.java
deleted file mode 100644
index c183171..0000000
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/Tuple.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.storage;
-
-import org.apache.tajo.datum.Datum;
-import org.apache.tajo.datum.ProtobufDatum;
-
-public interface Tuple extends Cloneable {
-  
-	public int size();
-	
-	public boolean contains(int fieldid);
-
-  public boolean isNull(int fieldid);
-	
-	public void clear();
-	
-	public void put(int fieldId, Datum value);
-
-  public void put(int fieldId, Datum [] values);
-
-  public void put(int fieldId, Tuple tuple);
-	
-	public void put(Datum [] values);
-	
-	public Datum get(int fieldId);
-	
-	public void setOffset(long offset);
-	
-	public long getOffset();
-
-	public boolean getBool(int fieldId);
-
-	public byte getByte(int fieldId);
-
-  public char getChar(int fieldId);
-	
-	public byte [] getBytes(int fieldId);
-	
-	public short getInt2(int fieldId);
-	
-	public int getInt4(int fieldId);
-	
-	public long getInt8(int fieldId);
-	
-	public float getFloat4(int fieldId);
-	
-	public double getFloat8(int fieldId);
-	
-	public String getText(int fieldId);
-
-  public ProtobufDatum getProtobufDatum(int fieldId);
-
-  public char [] getUnicodeChars(int fieldId);
-
-  public Tuple clone() throws CloneNotSupportedException;
-
-  public Datum[] getValues();
-}

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/main/java/org/apache/tajo/storage/VTuple.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/VTuple.java b/tajo-storage/src/main/java/org/apache/tajo/storage/VTuple.java
deleted file mode 100644
index 4fb35f9..0000000
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/VTuple.java
+++ /dev/null
@@ -1,233 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.storage;
-
-import com.google.gson.annotations.Expose;
-import org.apache.tajo.datum.Datum;
-import org.apache.tajo.datum.Inet4Datum;
-import org.apache.tajo.datum.NullDatum;
-import org.apache.tajo.datum.ProtobufDatum;
-import org.apache.tajo.exception.UnimplementedException;
-
-import java.net.InetAddress;
-import java.util.Arrays;
-
-public class VTuple implements Tuple, Cloneable {
-	@Expose public Datum [] values;
-	@Expose private long offset;
-	
-	public VTuple(int size) {
-		values = new Datum[size];
-	}
-
-  public VTuple(Tuple tuple) {
-    this.values = tuple.getValues().clone();
-    this.offset = ((VTuple)tuple).offset;
-  }
-
-  public VTuple(Datum [] datum) {
-    this(datum.length);
-    put(datum);
-  }
-
-	@Override
-	public int size() {	
-		return values.length;
-	}
-	
-	public boolean contains(int fieldId) {
-		return values[fieldId] != null;
-	}
-
-  @Override
-  public boolean isNull(int fieldid) {
-    return values[fieldid] instanceof NullDatum;
-  }
-
-  @Override
-  public void clear() {   
-    for (int i=0; i < values.length; i++) {
-      values[i] = null;
-    }
-  }
-	
-	//////////////////////////////////////////////////////
-	// Setter
-	//////////////////////////////////////////////////////	
-	public void put(int fieldId, Datum value) {
-		values[fieldId] = value;
-	}
-
-  @Override
-  public void put(int fieldId, Datum[] values) {
-    for (int i = fieldId, j = 0; j < values.length; i++, j++) {
-      values[i] = values[j];
-    }
-  }
-
-  @Override
-  public void put(int fieldId, Tuple tuple) {
-    for (int i = fieldId, j = 0; j < tuple.size(); i++, j++) {
-      values[i] = tuple.get(j);
-    }
-  }
-
-  public void put(Datum [] values) {
-    System.arraycopy(values, 0, this.values, 0, size());
-	}
-	
-	//////////////////////////////////////////////////////
-	// Getter
-	//////////////////////////////////////////////////////
-	public Datum get(int fieldId) {
-		return this.values[fieldId];
-	}
-	
-	public void setOffset(long offset) {
-	  this.offset = offset;
-	}
-	
-	public long getOffset() {
-	  return this.offset;
-	}
-	
-	@Override
-	public boolean getBool(int fieldId) {
-		return values[fieldId].asBool();
-	}
-
-  @Override
-	public byte getByte(int fieldId) {
-		return values[fieldId].asByte();
-	}
-
-  @Override
-  public char getChar(int fieldId) {
-    return values[fieldId].asChar();
-  }
-
-  @Override
-	public byte [] getBytes(int fieldId) {
-		return values[fieldId].asByteArray();
-	}
-
-  @Override
-	public short getInt2(int fieldId) {
-		return values[fieldId].asInt2();
-	}
-
-  @Override
-	public int getInt4(int fieldId) {
-		return values[fieldId].asInt4();
-	}
-
-  @Override
-	public long getInt8(int fieldId) {
-		return values[fieldId].asInt8();
-	}
-
-  @Override
-	public float getFloat4(int fieldId) {
-		return values[fieldId].asFloat4();
-	}
-
-  @Override
-	public double getFloat8(int fieldId) {
-		return values[fieldId].asFloat8();
-	}
-
-	public Inet4Datum getIPv4(int fieldId) {
-		return (Inet4Datum) values[fieldId];
-	}
-
-	public byte [] getIPv4Bytes(int fieldId) {
-		return values[fieldId].asByteArray();
-	}
-
-	public InetAddress getIPv6(int fieldId) {
-		throw new UnimplementedException("IPv6 is unsupported yet");
-	}
-
-	public byte[] getIPv6Bytes(int fieldId) {
-	  throw new UnimplementedException("IPv6 is unsupported yet");
-	}
-
-  @Override
-	public String getText(int fieldId) {
-		return values[fieldId].asChars();
-	}
-
-  @Override
-  public ProtobufDatum getProtobufDatum(int fieldId) {
-    return (ProtobufDatum) values[fieldId];
-  }
-
-  @Override
-  public char[] getUnicodeChars(int fieldId) {
-    return values[fieldId].asUnicodeChars();
-  }
-
-  @Override
-  public Tuple clone() throws CloneNotSupportedException {
-    VTuple tuple = (VTuple) super.clone();
-
-    tuple.values = new Datum[size()];
-    System.arraycopy(values, 0, tuple.values, 0, size()); //shallow copy
-    return tuple;
-  }
-
-  public String toString() {
-		boolean first = true;
-		StringBuilder str = new StringBuilder();
-		str.append("(");
-		for(int i=0; i < values.length; i++) {			
-			if(values[i] != null) {
-				if(first) {
-					first = false;
-				} else {
-					str.append(", ");
-				}
-				str.append(i)
-				.append("=>")
-				.append(values[i]);
-			}
-		}
-		str.append(")");
-		return str.toString();
-	}
-
-	@Override
-	public int hashCode() {
-	  return Arrays.hashCode(values);
-	}
-
-  @Override
-  public Datum[] getValues() {
-    return values;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (obj instanceof Tuple) {
-      Tuple other = (Tuple) obj;
-      return Arrays.equals(getValues(), other.getValues());
-    }
-    return false;
-  }
-}

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
index c1a96a5..51c612c 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestMergeScanner.java
@@ -108,7 +108,7 @@ public class TestMergeScanner {
 
     KeyValueSet options = new KeyValueSet();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
+    meta.setOptions(CatalogUtil.newPhysicalProperties(storeType));
     if (storeType == StoreType.AVRO) {
       meta.putOption(StorageConstants.AVRO_SCHEMA_LITERAL,
                      TEST_MULTIPLE_FILES_AVRO_SCHEMA);

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
----------------------------------------------------------------------
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
index ef5388c..dca21af 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
@@ -246,7 +246,7 @@ public class TestStorages {
     schema.addColumn("score", Type.FLOAT4);
 
     TableMeta meta = CatalogUtil.newTableMeta(storeType);
-    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
+    meta.setOptions(CatalogUtil.newPhysicalProperties(storeType));
     if (storeType == StoreType.AVRO) {
       meta.putOption(StorageConstants.AVRO_SCHEMA_LITERAL,
                      TEST_PROJECTION_AVRO_SCHEMA);
@@ -314,7 +314,7 @@ public class TestStorages {
 
     KeyValueSet options = new KeyValueSet();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
+    meta.setOptions(CatalogUtil.newPhysicalProperties(storeType));
     if (storeType == StoreType.AVRO) {
       String path = FileUtil.getResourcePath("testVariousTypes.avsc").toString();
       meta.putOption(StorageConstants.AVRO_SCHEMA_URL, path);
@@ -380,7 +380,7 @@ public class TestStorages {
 
     KeyValueSet options = new KeyValueSet();
     TableMeta meta = CatalogUtil.newTableMeta(storeType, options);
-    meta.setOptions(StorageUtil.newPhysicalProperties(storeType));
+    meta.setOptions(CatalogUtil.newPhysicalProperties(storeType));
     meta.putOption(StorageConstants.CSVFILE_NULL, "\\\\N");
     meta.putOption(StorageConstants.RCFILE_NULL, "\\\\N");
     meta.putOption(StorageConstants.RCFILE_SERDE, TextSerializerDeserializer.class.getName());

http://git-wip-us.apache.org/repos/asf/tajo/blob/b143f991/tajo-yarn-pullserver/src/main/java/org/apache/tajo/storage/Tuple.java
----------------------------------------------------------------------
diff --git a/tajo-yarn-pullserver/src/main/java/org/apache/tajo/storage/Tuple.java b/tajo-yarn-pullserver/src/main/java/org/apache/tajo/storage/Tuple.java
deleted file mode 100644
index 7b5d9c0..0000000
--- a/tajo-yarn-pullserver/src/main/java/org/apache/tajo/storage/Tuple.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.storage;
-
-import org.apache.tajo.datum.Datum;
-import org.apache.tajo.datum.ProtobufDatum;
-
-public interface Tuple extends Cloneable {
-  
-	public int size();
-	
-	public boolean contains(int fieldid);
-
-  public boolean isNull(int fieldid);
-	
-	public void clear();
-	
-	public void put(int fieldId, Datum value);
-
-  public void put(int fieldId, Datum[] values);
-
-  public void put(int fieldId, Tuple tuple);
-	
-	public void put(Datum[] values);
-	
-	public Datum get(int fieldId);
-	
-	public void setOffset(long offset);
-	
-	public long getOffset();
-
-  @SuppressWarnings("unused")
-	public boolean getBool(int fieldId);
-
-  @SuppressWarnings("unused")
-	public byte getByte(int fieldId);
-
-  @SuppressWarnings("unused")
-  public char getChar(int fieldId);
-
-	public byte [] getBytes(int fieldId);
-
-  @SuppressWarnings("unused")
-	public short getInt2(int fieldId);
-
-  @SuppressWarnings("unused")
-	public int getInt4(int fieldId);
-
-  @SuppressWarnings("unused")
-	public long getInt8(int fieldId);
-
-  @SuppressWarnings("unused")
-	public float getFloat4(int fieldId);
-
-  @SuppressWarnings("unused")
-	public double getFloat8(int fieldId);
-
-  @SuppressWarnings("unused")
-	public String getText(int fieldId);
-
-  @SuppressWarnings("unused")
-  public ProtobufDatum getProtobufDatum(int field);
-
-  public Tuple clone() throws CloneNotSupportedException;
-
-  public Datum[] getValues();
-}

