asterixdb-commits mailing list archives

From ima...@apache.org
Subject [22/85] [abbrv] [partial] incubator-asterixdb-hyracks git commit: Move Pregelix and Hivesterix codebase to new repositories: 1. Move Pregelix codebase to https://github.com/pregelix/pregelix; 2. Move Hivesterix codebase to https://code.google.com/p/hives
Date Fri, 24 Apr 2015 18:45:49 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
deleted file mode 100644
index abe9e28..0000000
--- a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/lazy/objectinspector/primitive/PrimitiveObjectInspectorFactory.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive;
-
-import java.util.HashMap;
-
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
-
-/**
- * PrimitiveObjectInspectorFactory is the primary way to create new
- * PrimitiveObjectInspector instances.
- * The reason for having caches here is that ObjectInspectors do not have
- * internal state, so ObjectInspectors created with the same construction
- * parameters should always resolve to exactly the same
- * ObjectInspector instance.
- */
-public final class PrimitiveObjectInspectorFactory {
-
-    public static final LazyBooleanObjectInspector LazyBooleanObjectInspector = new LazyBooleanObjectInspector();
-    public static final LazyByteObjectInspector LazyByteObjectInspector = new LazyByteObjectInspector();
-    public static final LazyShortObjectInspector LazyShortObjectInspector = new LazyShortObjectInspector();
-    public static final LazyIntObjectInspector LazyIntObjectInspector = new LazyIntObjectInspector();
-    public static final LazyLongObjectInspector LazyLongObjectInspector = new LazyLongObjectInspector();
-    public static final LazyFloatObjectInspector LazyFloatObjectInspector = new LazyFloatObjectInspector();
-    public static final LazyDoubleObjectInspector LazyDoubleObjectInspector = new LazyDoubleObjectInspector();
-    public static final LazyStringObjectInspector LazyStringObjectInspector = new LazyStringObjectInspector(false,
-            (byte) '\\');
-    public static final LazyVoidObjectInspector LazyVoidObjectInspector = new LazyVoidObjectInspector();
-
-    private static HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>> cachedPrimitiveLazyInspectorCache = new HashMap<PrimitiveCategory, AbstractPrimitiveLazyObjectInspector<?>>();
-
-    static {
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BOOLEAN, LazyBooleanObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.BYTE, LazyByteObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.SHORT, LazyShortObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.INT, LazyIntObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.LONG, LazyLongObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.FLOAT, LazyFloatObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.DOUBLE, LazyDoubleObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.STRING, LazyStringObjectInspector);
-        cachedPrimitiveLazyInspectorCache.put(PrimitiveCategory.VOID, LazyVoidObjectInspector);
-    }
-
-    /**
-     * Returns the cached lazy primitive ObjectInspector for the given PrimitiveCategory.
-     * 
-     * @param primitiveCategory
-     */
-    public static AbstractPrimitiveLazyObjectInspector<?> getPrimitiveLazyObjectInspector(
-            PrimitiveCategory primitiveCategory) {
-        AbstractPrimitiveLazyObjectInspector<?> result = cachedPrimitiveLazyInspectorCache.get(primitiveCategory);
-        if (result == null) {
-            throw new RuntimeException("Internal error: Cannot find ObjectInspector for " + primitiveCategory);
-        }
-        return result;
-    }
-
-    private PrimitiveObjectInspectorFactory() {
-        // prevent instantiation
-    }
-}
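
For reference, the removed factory is a simple cache keyed by PrimitiveCategory. A minimal usage sketch (illustrative only; FactoryUsageSketch is a hypothetical class name, and it assumes the removed classes above are still on the classpath):

    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;

    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.AbstractPrimitiveLazyObjectInspector;
    import edu.uci.ics.hivesterix.serde.lazy.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class FactoryUsageSketch {
        public static void main(String[] args) {
            // Inspectors are stateless, so the factory hands out the same cached
            // instance for every lookup of a given primitive category.
            AbstractPrimitiveLazyObjectInspector<?> first =
                    PrimitiveObjectInspectorFactory.getPrimitiveLazyObjectInspector(PrimitiveCategory.INT);
            AbstractPrimitiveLazyObjectInspector<?> second =
                    PrimitiveObjectInspectorFactory.getPrimitiveLazyObjectInspector(PrimitiveCategory.INT);
            assert first == second;
        }
    }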

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
deleted file mode 100644
index 60d73c6..0000000
--- a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/IHiveParser.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.serde.parser;
-
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-
-public interface IHiveParser {
-    /**
-     * Parses one Hive row, given as raw bytes, into the given tuple builder.
-     * 
-     * @param data the bytes of the row, starting at offset start and spanning length bytes
-     * @param tb the tuple builder that receives the binary fields
-     * @throws IOException if writing to the tuple builder fails
-     */
-    public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb) throws IOException;
-}
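
The interface contract is simple: write each field's binary form to the tuple builder's DataOutput, then mark the field boundary. A minimal illustrative implementation (PassThroughParser is a hypothetical name; it emits the whole row as a single field instead of splitting columns):

    import java.io.IOException;

    import edu.uci.ics.hivesterix.serde.parser.IHiveParser;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public class PassThroughParser implements IHiveParser {
        @Override
        public void parse(byte[] data, int start, int length, ArrayTupleBuilder tb) throws IOException {
            // Copy the raw row bytes as one binary field ...
            tb.getDataOutput().write(data, start, length);
            // ... and record where that field ends.
            tb.addFieldEndOffset();
        }
    }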

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java b/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
deleted file mode 100644
index d2949df..0000000
--- a/hivesterix/hivesterix-serde/src/main/java/edu/uci/ics/hivesterix/serde/parser/TextToBinaryTupleParser.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.serde.parser;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
-import org.apache.hadoop.hive.serde2.lazy.LazyLong;
-import org.apache.hadoop.hive.serde2.lazy.LazyShort;
-import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StructField;
-import org.apache.hadoop.io.Text;
-
-import edu.uci.ics.hivesterix.serde.lazy.LazyUtils;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-
-public class TextToBinaryTupleParser implements IHiveParser {
-    private int[] invertedIndex;
-    private int[] fieldEnds;
-    private int lastNecessaryFieldIndex;
-    private LazySimpleStructObjectInspector inputObjectInspector;
-    private List<? extends StructField> fieldRefs;
-
-    public TextToBinaryTupleParser(int[] outputColumnsOffset, ObjectInspector structInspector) {
-        int size = 0;
-        for (int i = 0; i < outputColumnsOffset.length; i++)
-            if (outputColumnsOffset[i] >= 0)
-                size++;
-        invertedIndex = new int[size];
-        for (int i = 0; i < outputColumnsOffset.length; i++)
-            if (outputColumnsOffset[i] >= 0) {
-                invertedIndex[outputColumnsOffset[i]] = i;
-                lastNecessaryFieldIndex = i;
-            }
-        fieldEnds = new int[outputColumnsOffset.length];
-        for (int i = 0; i < fieldEnds.length; i++)
-            fieldEnds[i] = 0;
-        inputObjectInspector = (LazySimpleStructObjectInspector) structInspector;
-        fieldRefs = inputObjectInspector.getAllStructFieldRefs();
-    }
-
-    @Override
-    public void parse(byte[] bytes, int start, int length, ArrayTupleBuilder tb) throws IOException {
-        byte separator = inputObjectInspector.getSeparator();
-        boolean lastColumnTakesRest = inputObjectInspector.getLastColumnTakesRest();
-        boolean isEscaped = inputObjectInspector.isEscaped();
-        byte escapeChar = inputObjectInspector.getEscapeChar();
-        DataOutput output = tb.getDataOutput();
-
-        int structByteEnd = start + length - 1;
-        int fieldId = 0;
-        int fieldByteEnd = start;
-
-        // Go through all bytes in the byte[]
-        while (fieldByteEnd <= structByteEnd && fieldId <= lastNecessaryFieldIndex) {
-            if (fieldByteEnd == structByteEnd || bytes[fieldByteEnd] == separator) {
-                // Reached the end of a field?
-                if (lastColumnTakesRest && fieldId == fieldEnds.length - 1) {
-                    fieldByteEnd = structByteEnd;
-                }
-                fieldEnds[fieldId] = fieldByteEnd;
-                if (fieldId == fieldEnds.length - 1 || fieldByteEnd == structByteEnd) {
-                    // for the case of null fields
-                    for (int i = fieldId; i < fieldEnds.length; i++) {
-                        fieldEnds[i] = fieldByteEnd;
-                    }
-                    break;
-                }
-                fieldByteEnd++;
-                fieldId++;
-            } else {
-                if (isEscaped && bytes[fieldByteEnd] == escapeChar && fieldByteEnd + 1 < structByteEnd) {
-                    // ignore the char after escape_char
-                    fieldByteEnd += 2;
-                } else {
-                    fieldByteEnd++;
-                }
-            }
-        }
-
-        for (int i = 0; i < invertedIndex.length; i++) {
-            int index = invertedIndex[i];
-            StructField fieldRef = fieldRefs.get(index);
-            ObjectInspector inspector = fieldRef.getFieldObjectInspector();
-            Category category = inspector.getCategory();
-            int fieldStart = index == 0 ? 0 : fieldEnds[index - 1] + 1;
-            int fieldEnd = fieldEnds[index];
-            if (bytes[fieldEnd] == separator)
-                fieldEnd--;
-            int fieldLen = fieldEnd - fieldStart + 1;
-            switch (category) {
-                case PRIMITIVE:
-                    PrimitiveObjectInspector poi = (PrimitiveObjectInspector) inspector;
-                    switch (poi.getPrimitiveCategory()) {
-                        case VOID: {
-                            break;
-                        }
-                        case BOOLEAN: {
-                            output.write(bytes[fieldStart]);
-                            break;
-                        }
-                        case BYTE: {
-                            output.write(bytes[fieldStart]);
-                            break;
-                        }
-                        case SHORT: {
-                            short v = LazyShort.parseShort(bytes, fieldStart, fieldLen);
-                            output.write((byte) (v >> 8));
-                            output.write((byte) (v));
-                            break;
-                        }
-                        case INT: {
-                            int v = LazyInteger.parseInt(bytes, fieldStart, fieldLen);
-                            LazyUtils.writeVInt(output, v);
-                            break;
-                        }
-                        case LONG: {
-                            long v = LazyLong.parseLong(bytes, fieldStart, fieldLen);
-                            LazyUtils.writeVLong(output, v);
-                            break;
-                        }
-                        case FLOAT: {
-                            float value = Float.parseFloat(Text.decode(bytes, fieldStart, fieldLen));
-                            int v = Float.floatToIntBits(value);
-                            output.write((byte) (v >> 24));
-                            output.write((byte) (v >> 16));
-                            output.write((byte) (v >> 8));
-                            output.write((byte) (v));
-                            break;
-                        }
-                        case DOUBLE: {
-                            try {
-                                double value = Double.parseDouble(Text.decode(bytes, fieldStart, fieldLen));
-                                long v = Double.doubleToLongBits(value);
-                                output.write((byte) (v >> 56));
-                                output.write((byte) (v >> 48));
-                                output.write((byte) (v >> 40));
-                                output.write((byte) (v >> 32));
-                                output.write((byte) (v >> 24));
-                                output.write((byte) (v >> 16));
-                                output.write((byte) (v >> 8));
-                                output.write((byte) (v));
-                            } catch (NumberFormatException e) {
-                                throw e;
-                            }
-                            break;
-                        }
-                        case STRING: {
-                            LazyUtils.writeVInt(output, fieldLen);
-                            output.write(bytes, fieldStart, fieldLen);
-                            break;
-                        }
-                        default: {
-                            throw new RuntimeException("Unrecognized type: " + poi.getPrimitiveCategory());
-                        }
-                    }
-                    break;
-                case STRUCT:
-                    throw new NotImplementedException("Unrecognized type: struct ");
-                case LIST:
-                    throw new NotImplementedException("Unrecognized type: list ");
-                case MAP:
-                    throw new NotImplementedException("Unrecognized type: map ");
-                case UNION:
-                    throw new NotImplementedException("Unrecognized type: union ");
-            }
-            tb.addFieldEndOffset();
-        }
-    }
-}
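
A sketch of how the removed parser is driven (parseRow is a hypothetical helper; it assumes a LazySimpleStructObjectInspector describing the row layout has been obtained elsewhere). As the constructor above shows, outputColumnsOffset[i] is the output position of input column i, or a negative value if that column is not projected:

    import java.io.IOException;

    import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;

    import edu.uci.ics.hivesterix.serde.parser.IHiveParser;
    import edu.uci.ics.hivesterix.serde.parser.TextToBinaryTupleParser;
    import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;

    public class ParserUsageSketch {
        // Parses one delimited text row (e.g. "1|foo|3.5") into Hyracks binary form.
        public static void parseRow(byte[] row, int[] outputColumnsOffset,
                LazySimpleStructObjectInspector rowInspector, ArrayTupleBuilder tb) throws IOException {
            IHiveParser parser = new TextToBinaryTupleParser(outputColumnsOffset, rowInspector);
            parser.parse(row, 0, row.length, tb);
            // tb now holds one binary field per projected column.
        }
    }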

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/pom.xml
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/pom.xml b/hivesterix/hivesterix-translator/pom.xml
deleted file mode 100644
index bac73c1..0000000
--- a/hivesterix/hivesterix-translator/pom.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0"?>
-<!--
- ! Copyright 2009-2013 by The Regents of the University of California
- ! Licensed under the Apache License, Version 2.0 (the "License");
- ! you may not use this file except in compliance with the License.
- ! you may obtain a copy of the License from
- ! 
- !     http://www.apache.org/licenses/LICENSE-2.0
- ! 
- ! Unless required by applicable law or agreed to in writing, software
- ! distributed under the License is distributed on an "AS IS" BASIS,
- ! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ! See the License for the specific language governing permissions and
- ! limitations under the License.
- !-->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>hivesterix-translator</artifactId>
-	<name>hivesterix-translator</name>
-
-	<parent>
-		<artifactId>hivesterix</artifactId>
-		<groupId>edu.uci.ics.hyracks</groupId>
-		<version>0.2.14-SNAPSHOT</version>
-	</parent>
-
-	<build>
-		<plugins>
-			<plugin>
-				<groupId>org.apache.maven.plugins</groupId>
-				<artifactId>maven-compiler-plugin</artifactId>
-				<version>2.0.2</version>
-				<configuration>
-					<source>1.7</source>
-					<target>1.7</target>
-					<encoding>UTF-8</encoding>
-					<fork>true</fork>
-				</configuration>
-			</plugin>
-		</plugins>
-	</build>
-
-	<dependencies>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>algebricks-compiler</artifactId>
-			<version>0.2.14-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hivesterix-common</artifactId>
-			<version>0.2.14-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-		<dependency>
-			<groupId>edu.uci.ics.hyracks</groupId>
-			<artifactId>hivesterix-runtime</artifactId>
-			<version>0.2.14-SNAPSHOT</version>
-			<type>jar</type>
-			<scope>compile</scope>
-		</dependency>
-	</dependencies>
-</project>

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
deleted file mode 100644
index 76cc51d..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveAlgebricksTranslator.java
+++ /dev/null
@@ -1,820 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan;
-
-import java.io.OutputStreamWriter;
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.ExtractOperator;
-import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
-import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.exec.GroupByOperator;
-import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
-import org.apache.hadoop.hive.ql.exec.LimitOperator;
-import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
-import org.apache.hadoop.hive.ql.exec.SelectOperator;
-import org.apache.hadoop.hive.ql.exec.TableScanOperator;
-import org.apache.hadoop.hive.ql.exec.UDF;
-import org.apache.hadoop.hive.ql.exec.UDTFOperator;
-import org.apache.hadoop.hive.ql.exec.UnionOperator;
-import org.apache.hadoop.hive.ql.plan.AggregationDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeFieldDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
-import org.apache.hadoop.hive.ql.plan.PartitionDesc;
-import org.apache.hadoop.hive.ql.plan.UDTFDesc;
-import org.apache.hadoop.hive.ql.plan.api.OperatorType;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-
-import edu.uci.ics.hivesterix.logical.expression.ExpressionConstant;
-import edu.uci.ics.hivesterix.logical.expression.HiveAlgebricksBuiltInFunctionMap;
-import edu.uci.ics.hivesterix.logical.expression.HiveFunctionInfo;
-import edu.uci.ics.hivesterix.logical.expression.HivesterixConstantValue;
-import edu.uci.ics.hivesterix.logical.plan.visitor.ExtractVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.FilterVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.GroupByVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.JoinVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.LateralViewJoinVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.LimitVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.MapJoinVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.ProjectVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.SortVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.TableScanWriteVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.UnionVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.Visitor;
-import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.UnnestingFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.LogicalOperatorPrettyPrintVisitor;
-import edu.uci.ics.hyracks.algebricks.core.algebra.prettyprint.PlanPrettyPrinter;
-
-@SuppressWarnings("rawtypes")
-public class HiveAlgebricksTranslator implements Translator {
-
-    private int currentVariable = 0;
-
-    private List<Mutable<ILogicalOperator>> logicalOp = new ArrayList<Mutable<ILogicalOperator>>();
-
-    private boolean continueTraverse = true;
-
-    private IMetadataProvider<PartitionDesc, Object> metaData;
-
-    /**
-     * map variable name to the logical variable
-     */
-    private HashMap<String, LogicalVariable> nameToLogicalVariableMap = new HashMap<String, LogicalVariable>();
-
-    /**
-     * map field name to LogicalVariable
-     */
-    private HashMap<String, LogicalVariable> fieldToLogicalVariableMap = new HashMap<String, LogicalVariable>();
-
-    /**
-     * map logical variable to name
-     */
-    private HashMap<LogicalVariable, String> logicalVariableToFieldMap = new HashMap<LogicalVariable, String>();
-
-    /**
-     * asterix root operators
-     */
-    private List<Mutable<ILogicalOperator>> rootOperators = new ArrayList<Mutable<ILogicalOperator>>();
-
-    /**
-     * a list of visitors
-     */
-    private List<Visitor> visitors = new ArrayList<Visitor>();
-
-    /**
-     * output writer to print things out
-     */
-    private static PrintWriter outputWriter = new PrintWriter(new OutputStreamWriter(System.out));
-
-    /**
-     * map a logical variable to type info
-     */
-    private HashMap<LogicalVariable, TypeInfo> variableToType = new HashMap<LogicalVariable, TypeInfo>();
-
-    @Override
-    public LogicalVariable getVariable(String fieldName, TypeInfo type) {
-        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
-        if (var == null) {
-            currentVariable++;
-            var = new LogicalVariable(currentVariable);
-            fieldToLogicalVariableMap.put(fieldName, var);
-            nameToLogicalVariableMap.put(var.toString(), var);
-            variableToType.put(var, type);
-            logicalVariableToFieldMap.put(var, fieldName);
-        }
-        return var;
-    }
-
-    @Override
-    public LogicalVariable getNewVariable(String fieldName, TypeInfo type) {
-        currentVariable++;
-        LogicalVariable var = new LogicalVariable(currentVariable);
-        fieldToLogicalVariableMap.put(fieldName, var);
-        nameToLogicalVariableMap.put(var.toString(), var);
-        variableToType.put(var, type);
-        logicalVariableToFieldMap.put(var, fieldName);
-        return var;
-    }
-
-    @Override
-    public void replaceVariable(LogicalVariable oldVar, LogicalVariable newVar) {
-        String name = this.logicalVariableToFieldMap.get(oldVar);
-        if (name != null) {
-            fieldToLogicalVariableMap.put(name, newVar);
-            nameToLogicalVariableMap.put(newVar.toString(), newVar);
-            nameToLogicalVariableMap.put(oldVar.toString(), newVar);
-            logicalVariableToFieldMap.put(newVar, name);
-        }
-    }
-
-    @Override
-    public IMetadataProvider<PartitionDesc, Object> getMetadataProvider() {
-        return metaData;
-    }
-
-    /**
-     * only look up a variable by field name, without rewriting it
-     * 
-     * @param fieldName
-     * @return the logical variable, or null if none is mapped
-     */
-    private LogicalVariable getVariableOnly(String fieldName) {
-        return fieldToLogicalVariableMap.get(fieldName);
-    }
-
-    public void updateVariable(String fieldName, LogicalVariable variable) {
-        LogicalVariable var = fieldToLogicalVariableMap.get(fieldName);
-        if (var == null) {
-            fieldToLogicalVariableMap.put(fieldName, variable);
-            nameToLogicalVariableMap.put(fieldName, variable);
-        } else if (!var.equals(variable)) {
-            fieldToLogicalVariableMap.put(fieldName, variable);
-            nameToLogicalVariableMap.put(fieldName, variable);
-        }
-    }
-
-    /**
-     * get a list of logical variables from the schema
-     * 
-     * @param schema
-     * @return
-     */
-    @Override
-    public List<LogicalVariable> getVariablesFromSchema(Schema schema) {
-        List<LogicalVariable> variables = new ArrayList<LogicalVariable>();
-        List<String> names = schema.getNames();
-
-        for (String name : names)
-            variables.add(nameToLogicalVariableMap.get(name));
-        return variables;
-    }
-
-    /**
-     * get variable to typeinfo map
-     * 
-     * @return
-     */
-    public HashMap<LogicalVariable, TypeInfo> getVariableContext() {
-        return this.variableToType;
-    }
-
-    /**
-     * get the number of variables created so far
-     * 
-     * @return the variable counter
-     */
-    public int getVariableCounter() {
-        return currentVariable + 1;
-    }
-
-    /**
-     * translate the Hive operator tree into an Algebricks operator tree
-     * 
-     * @param hiveRoot
-     *            the roots of the Hive operator tree
-     * @param parentOperator the Algebricks parent operator
-     */
-    public void translate(List<Operator> hiveRoot, ILogicalOperator parentOperator,
-            HashMap<String, PartitionDesc> aliasToPathMap) throws AlgebricksException {
-        /**
-         * register visitors
-         */
-        visitors.add(new FilterVisitor());
-        visitors.add(new GroupByVisitor());
-        visitors.add(new JoinVisitor());
-        visitors.add(new LateralViewJoinVisitor());
-        visitors.add(new UnionVisitor());
-        visitors.add(new LimitVisitor());
-        visitors.add(new MapJoinVisitor());
-        visitors.add(new ProjectVisitor());
-        visitors.add(new SortVisitor());
-        visitors.add(new ExtractVisitor());
-        visitors.add(new TableScanWriteVisitor(aliasToPathMap));
-
-        List<Mutable<ILogicalOperator>> refList = translate(hiveRoot, new MutableObject<ILogicalOperator>(
-                parentOperator));
-        insertReplicateOperator(refList);
-        if (refList != null)
-            rootOperators.addAll(refList);
-    }
-
-    /**
-     * translate operator DAG
-     * 
-     * @param hiveRoot
-     * @param AlgebricksParentOperator
-     * @return
-     */
-    private List<Mutable<ILogicalOperator>> translate(List<Operator> hiveRoot,
-            Mutable<ILogicalOperator> AlgebricksParentOperator) throws AlgebricksException {
-
-        for (Operator hiveOperator : hiveRoot) {
-            continueTraverse = true;
-            Mutable<ILogicalOperator> currentOperatorRef = null;
-            if (hiveOperator.getType() == OperatorType.FILTER) {
-                FilterOperator fop = (FilterOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.REDUCESINK) {
-                ReduceSinkOperator fop = (ReduceSinkOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.JOIN) {
-                JoinOperator fop = (JoinOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null) {
-                        continueTraverse = true;
-                        break;
-                    } else
-                        continueTraverse = false;
-                }
-                if (currentOperatorRef == null)
-                    return null;
-            } else if (hiveOperator.getType() == OperatorType.LATERALVIEWJOIN) {
-                LateralViewJoinOperator fop = (LateralViewJoinOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-                if (currentOperatorRef == null)
-                    return null;
-            } else if (hiveOperator.getType() == OperatorType.MAPJOIN) {
-                MapJoinOperator fop = (MapJoinOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null) {
-                        continueTraverse = true;
-                        break;
-                    } else
-                        continueTraverse = false;
-                }
-                if (currentOperatorRef == null)
-                    return null;
-            } else if (hiveOperator.getType() == OperatorType.SELECT) {
-                SelectOperator fop = (SelectOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.EXTRACT) {
-                ExtractOperator fop = (ExtractOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.GROUPBY) {
-                GroupByOperator fop = (GroupByOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.TABLESCAN) {
-                TableScanOperator fop = (TableScanOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.FILESINK) {
-                FileSinkOperator fop = (FileSinkOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(fop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.LIMIT) {
-                LimitOperator lop = (LimitOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.UDTF) {
-                UDTFOperator lop = (UDTFOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null)
-                        break;
-                }
-            } else if (hiveOperator.getType() == OperatorType.UNION) {
-                UnionOperator lop = (UnionOperator) hiveOperator;
-                for (Visitor visitor : visitors) {
-                    currentOperatorRef = visitor.visit(lop, AlgebricksParentOperator, this);
-                    if (currentOperatorRef != null) {
-                        continueTraverse = true;
-                        break;
-                    } else
-                        continueTraverse = false;
-                }
-            } else
-                ;
-            if (hiveOperator.getChildOperators() != null && hiveOperator.getChildOperators().size() > 0
-                    && continueTraverse) {
-                @SuppressWarnings("unchecked")
-                List<Operator> children = hiveOperator.getChildOperators();
-                if (currentOperatorRef == null)
-                    currentOperatorRef = AlgebricksParentOperator;
-                translate(children, currentOperatorRef);
-            }
-            if (hiveOperator.getChildOperators() == null || hiveOperator.getChildOperators().size() == 0)
-                logicalOp.add(currentOperatorRef);
-        }
-        return logicalOp;
-    }
-
-    /**
-     * used in select and group-by to handle columns that are not plain column references
-     * 
-     * @param cols
-     * @return the assign operator for the appended expressions, or null if none
-     */
-    public ILogicalOperator getAssignOperator(Mutable<ILogicalOperator> parent, List<ExprNodeDesc> cols,
-            ArrayList<LogicalVariable> variables) {
-
-        ArrayList<Mutable<ILogicalExpression>> expressions = new ArrayList<Mutable<ILogicalExpression>>();
-
-        /**
-         * variables to be appended in the assign operator
-         */
-        ArrayList<LogicalVariable> appendedVariables = new ArrayList<LogicalVariable>();
-
-        // one variable can only be assigned once
-        for (ExprNodeDesc hiveExpr : cols) {
-            rewriteExpression(hiveExpr);
-
-            if (hiveExpr instanceof ExprNodeColumnDesc) {
-                ExprNodeColumnDesc desc2 = (ExprNodeColumnDesc) hiveExpr;
-                String fieldName = desc2.getTabAlias() + "." + desc2.getColumn();
-
-                // System.out.println("project expr: " + fieldName);
-
-                if (fieldName.indexOf("$$") < 0) {
-                    LogicalVariable var = getVariable(fieldName, hiveExpr.getTypeInfo());
-                    desc2.setColumn(var.toString());
-                    desc2.setTabAlias("");
-                    variables.add(var);
-                } else {
-                    LogicalVariable var = nameToLogicalVariableMap.get(desc2.getColumn());
-                    String name = this.logicalVariableToFieldMap.get(var);
-                    var = this.getVariableOnly(name);
-                    variables.add(var);
-                }
-            } else {
-                Mutable<ILogicalExpression> asterixExpr = translateScalarFucntion(hiveExpr);
-                expressions.add(asterixExpr);
-                LogicalVariable var = getVariable(hiveExpr.getExprString() + asterixExpr.hashCode(),
-                        hiveExpr.getTypeInfo());
-                variables.add(var);
-                appendedVariables.add(var);
-            }
-        }
-
-        /**
-         * create an assign operator to deal with appending
-         */
-        ILogicalOperator assignOp = null;
-        if (appendedVariables.size() > 0) {
-            assignOp = new AssignOperator(appendedVariables, expressions);
-            assignOp.getInputs().add(parent);
-        }
-        return assignOp;
-    }
-
-    private ILogicalPlan plan;
-
-    public ILogicalPlan genLogicalPlan() {
-        plan = new ALogicalPlanImpl(rootOperators);
-        return plan;
-    }
-
-    public void printOperators() throws AlgebricksException {
-        LogicalOperatorPrettyPrintVisitor pvisitor = new LogicalOperatorPrettyPrintVisitor();
-        StringBuilder buffer = new StringBuilder();
-        PlanPrettyPrinter.printPlan(plan, buffer, pvisitor, 0);
-        outputWriter.println(buffer);
-        outputWriter.println("rewritten variables: ");
-        outputWriter.flush();
-        printVariables();
-
-    }
-
-    public static void setOutputPrinter(PrintWriter writer) {
-        outputWriter = writer;
-    }
-
-    private void printVariables() {
-        Set<Entry<String, LogicalVariable>> entries = fieldToLogicalVariableMap.entrySet();
-
-        for (Entry<String, LogicalVariable> entry : entries) {
-            outputWriter.println(entry.getKey() + " -> " + entry.getValue());
-        }
-        outputWriter.flush();
-    }
-
-    /**
-     * generate a Schema from the output signature of an operator
-     * 
-     * @param operator
-     *            The Hive operator
-     * @return a Schema object
-     */
-    public Schema generateInputSchema(Operator operator) {
-        List<String> variableNames = new ArrayList<String>();
-        List<TypeInfo> typeList = new ArrayList<TypeInfo>();
-        List<ColumnInfo> columns = operator.getSchema().getSignature();
-
-        for (ColumnInfo col : columns) {
-            // typeList.add();
-            TypeInfo type = col.getType();
-            typeList.add(type);
-
-            String fieldName = col.getInternalName();
-            variableNames.add(fieldName);
-        }
-
-        return new Schema(variableNames, typeList);
-    }
-
-    /**
-     * rewrite the names of output columns so that downstream expression
-     * evaluators can use them
-     * 
-     * @param operator
-     */
-    public void rewriteOperatorOutputSchema(Operator operator) {
-        List<ColumnInfo> columns = operator.getSchema().getSignature();
-        for (ColumnInfo column : columns) {
-            String columnName = column.getTabAlias() + "." + column.getInternalName();
-            if (columnName.indexOf("$$") < 0) {
-                LogicalVariable var = getVariable(columnName, column.getType());
-                column.setInternalName(var.toString());
-            }
-        }
-    }
-
-    @Override
-    public void rewriteOperatorOutputSchema(List<LogicalVariable> variables, Operator operator) {
-        // printOperatorSchema(operator);
-        List<ColumnInfo> columns = operator.getSchema().getSignature();
-        // if (variables.size() != columns.size()) {
-        // throw new IllegalStateException("output cardinality error " +
-        // operator.getName() + " variable size: "
-        // + variables.size() + " expected " + columns.size());
-        // }
-        for (int i = 0; i < variables.size(); i++) {
-            LogicalVariable var = variables.get(i);
-            ColumnInfo column = columns.get(i);
-            String fieldName = column.getTabAlias() + "." + column.getInternalName();
-            if (fieldName.indexOf("$$") < 0) {
-                updateVariable(fieldName, var);
-                column.setInternalName(var.toString());
-            }
-        }
-
-        // printOperatorSchema(operator);
-    }
-
-    /**
-     * rewrite an expression and substitute variables
-     * 
-     * @param expr
-     *            hive expression
-     */
-    public void rewriteExpression(ExprNodeDesc expr) {
-        if (expr instanceof ExprNodeColumnDesc) {
-            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;
-            String fieldName = desc.getTabAlias() + "." + desc.getColumn();
-            if (fieldName.indexOf("$$") < 0) {
-                LogicalVariable var = getVariableOnly(fieldName);
-                if (var == null) {
-                    fieldName = "." + desc.getColumn();
-                    var = getVariableOnly(fieldName);
-                    if (var == null) {
-                        fieldName = "null." + desc.getColumn();
-                        var = getVariableOnly(fieldName);
-                        if (var == null) {
-                            throw new IllegalStateException(fieldName + " is wrong!!! ");
-                        }
-                    }
-                }
-                String name = this.logicalVariableToFieldMap.get(var);
-                var = getVariableOnly(name);
-                desc.setColumn(var.toString());
-            }
-        } else {
-            if (expr.getChildren() != null && expr.getChildren().size() > 0) {
-                List<ExprNodeDesc> children = expr.getChildren();
-                for (ExprNodeDesc desc : children)
-                    rewriteExpression(desc);
-            }
-        }
-    }
-
-    /**
-     * partially rewrite an expression and substitute variables (no fallback lookups)
-     * 
-     * @param expr
-     *            hive expression
-     */
-    public void rewriteExpressionPartial(ExprNodeDesc expr) {
-        if (expr instanceof ExprNodeColumnDesc) {
-            ExprNodeColumnDesc desc = (ExprNodeColumnDesc) expr;
-            String fieldName = desc.getTabAlias() + "." + desc.getColumn();
-            if (fieldName.indexOf("$$") < 0) {
-                LogicalVariable var = getVariableOnly(fieldName);
-                desc.setColumn(var.toString());
-            }
-        } else {
-            if (expr.getChildren() != null && expr.getChildren().size() > 0) {
-                List<ExprNodeDesc> children = expr.getChildren();
-                for (ExprNodeDesc desc : children)
-                    rewriteExpressionPartial(desc);
-            }
-        }
-    }
-
-    // private void printOperatorSchema(Operator operator) {
-    // // System.out.println(operator.getName());
-    // // List<ColumnInfo> columns = operator.getSchema().getSignature();
-    // // for (ColumnInfo column : columns) {
-    // // System.out.print(column.getTabAlias() + "." +
-    // // column.getInternalName() + "  ");
-    // // }
-    // // System.out.println();
-    // }
-
-    /**
-     * translate scalar function expression
-     * 
-     * @param hiveExpr
-     * @return
-     */
-    public Mutable<ILogicalExpression> translateScalarFucntion(ExprNodeDesc hiveExpr) {
-        ILogicalExpression AlgebricksExpr;
-
-        if (hiveExpr instanceof ExprNodeGenericFuncDesc) {
-            List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
-            List<ExprNodeDesc> children = hiveExpr.getChildren();
-
-            for (ExprNodeDesc child : children)
-                arguments.add(translateScalarFucntion(child));
-
-            ExprNodeGenericFuncDesc funcExpr = (ExprNodeGenericFuncDesc) hiveExpr;
-            GenericUDF genericUdf = funcExpr.getGenericUDF();
-            UDF udf = null;
-            if (genericUdf instanceof GenericUDFBridge) {
-                GenericUDFBridge bridge = (GenericUDFBridge) genericUdf;
-                try {
-                    udf = bridge.getUdfClass().newInstance();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-            }
-
-            /**
-             * set up the hive function
-             */
-            Object hiveFunction = genericUdf;
-            if (udf != null)
-                hiveFunction = udf;
-
-            FunctionIdentifier funcId = HiveAlgebricksBuiltInFunctionMap.INSTANCE.getAlgebricksFunctionId(hiveFunction
-                    .getClass());
-            if (funcId == null) {
-                funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, hiveFunction.getClass().getName());
-            }
-
-            Object functionInfo = null;
-            if (genericUdf instanceof GenericUDFBridge) {
-                functionInfo = funcExpr;
-            }
-
-            /**
-             * generate the function call expression
-             */
-            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(
-                    funcId, functionInfo), arguments);
-            AlgebricksExpr = AlgebricksFuncExpr;
-
-        } else if (hiveExpr instanceof ExprNodeColumnDesc) {
-            ExprNodeColumnDesc column = (ExprNodeColumnDesc) hiveExpr;
-            LogicalVariable var = this.getVariable(column.getColumn());
-            AlgebricksExpr = new VariableReferenceExpression(var);
-
-        } else if (hiveExpr instanceof ExprNodeFieldDesc) {
-            FunctionIdentifier funcId;
-            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.FIELDACCESS);
-
-            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(
-                    funcId, hiveExpr));
-            AlgebricksExpr = AlgebricksFuncExpr;
-        } else if (hiveExpr instanceof ExprNodeConstantDesc) {
-            ExprNodeConstantDesc hiveConst = (ExprNodeConstantDesc) hiveExpr;
-            Object value = hiveConst.getValue();
-            AlgebricksExpr = new ConstantExpression(new HivesterixConstantValue(value));
-        } else if (hiveExpr instanceof ExprNodeNullDesc) {
-            FunctionIdentifier funcId;
-            funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, ExpressionConstant.NULL);
-
-            ScalarFunctionCallExpression AlgebricksFuncExpr = new ScalarFunctionCallExpression(new HiveFunctionInfo(
-                    funcId, hiveExpr));
-
-            AlgebricksExpr = AlgebricksFuncExpr;
-        } else {
-            throw new IllegalStateException("unknown hive expression");
-        }
-        return new MutableObject<ILogicalExpression>(AlgebricksExpr);
-    }
-
-    /**
-     * translate aggregation function expression
-     * 
-     * @param aggregateDesc
-     * @return
-     */
-    public Mutable<ILogicalExpression> translateAggregation(AggregationDesc aggregateDesc) {
-
-        String UDAFName = aggregateDesc.getGenericUDAFName();
-
-        List<Mutable<ILogicalExpression>> arguments = new ArrayList<Mutable<ILogicalExpression>>();
-        List<ExprNodeDesc> children = aggregateDesc.getParameters();
-
-        for (ExprNodeDesc child : children)
-            arguments.add(translateScalarFucntion(child));
-
-        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDAFName + "("
-                + aggregateDesc.getMode() + ")");
-        HiveFunctionInfo funcInfo = new HiveFunctionInfo(funcId, aggregateDesc);
-        AggregateFunctionCallExpression aggregationExpression = new AggregateFunctionCallExpression(funcInfo, false,
-                arguments);
-        return new MutableObject<ILogicalExpression>(aggregationExpression);
-    }
-
-    /**
-     * translate a table-generating (UDTF) function into an unnesting expression
-     * 
-     * @param udtfDesc
-     * @return
-     */
-    public Mutable<ILogicalExpression> translateUnnestFunction(UDTFDesc udtfDesc, Mutable<ILogicalExpression> argument) {
-
-        String UDTFName = udtfDesc.getUDTFName();
-
-        FunctionIdentifier funcId = new FunctionIdentifier(ExpressionConstant.NAMESPACE, UDTFName);
-        UnnestingFunctionCallExpression unnestingExpression = new UnnestingFunctionCallExpression(new HiveFunctionInfo(
-                funcId, udtfDesc));
-        unnestingExpression.getArguments().add(argument);
-        return new MutableObject<ILogicalExpression>(unnestingExpression);
-    }
-
-    /**
-     * get typeinfo
-     */
-    @Override
-    public TypeInfo getType(LogicalVariable var) {
-        return variableToType.get(var);
-    }
-
-    /**
-     * get variable from variable name
-     */
-    @Override
-    public LogicalVariable getVariable(String name) {
-        return nameToLogicalVariableMap.get(name);
-    }
-
-    @Override
-    public LogicalVariable getVariableFromFieldName(String fieldName) {
-        return this.getVariableOnly(fieldName);
-    }
-
-    /**
-     * set the metadata provider
-     */
-    @Override
-    public void setMetadataProvider(IMetadataProvider<PartitionDesc, Object> metadata) {
-        this.metaData = metadata;
-    }
-
-    /**
-     * insert ReplicateOperator when necessary
-     */
-    private void insertReplicateOperator(List<Mutable<ILogicalOperator>> roots) {
-        Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> childToParentsMap = new HashMap<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>>();
-        buildChildToParentsMapping(roots, childToParentsMap);
-        for (Entry<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> entry : childToParentsMap.entrySet()) {
-            List<Mutable<ILogicalOperator>> pList = entry.getValue();
-            if (pList.size() > 1) {
-                ILogicalOperator rop = new ReplicateOperator(pList.size());
-                Mutable<ILogicalOperator> ropRef = new MutableObject<ILogicalOperator>(rop);
-                Mutable<ILogicalOperator> childRef = entry.getKey();
-                rop.getInputs().add(childRef);
-                for (Mutable<ILogicalOperator> parentRef : pList) {
-                    ILogicalOperator parentOp = parentRef.getValue();
-                    int index = parentOp.getInputs().indexOf(childRef);
-                    parentOp.getInputs().set(index, ropRef);
-                }
-            }
-        }
-    }
-
-    /**
-     * build the mapping from each child operator to its parents
-     * 
-     * @param roots
-     * @param map
-     */
-    private void buildChildToParentsMapping(List<Mutable<ILogicalOperator>> roots,
-            Map<Mutable<ILogicalOperator>, List<Mutable<ILogicalOperator>>> map) {
-        for (Mutable<ILogicalOperator> opRef : roots) {
-            List<Mutable<ILogicalOperator>> childRefs = opRef.getValue().getInputs();
-            for (Mutable<ILogicalOperator> childRef : childRefs) {
-                List<Mutable<ILogicalOperator>> parentList = map.get(childRef);
-                if (parentList == null) {
-                    parentList = new ArrayList<Mutable<ILogicalOperator>>();
-                    map.put(childRef, parentList);
-                }
-                if (!parentList.contains(opRef))
-                    parentList.add(opRef);
-            }
-            buildChildToParentsMapping(childRefs, map);
-        }
-    }
-}
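
The insertReplicateOperator / buildChildToParentsMapping pair above rewrites the operator DAG so that a child feeding several parents is evaluated once and fanned out through a ReplicateOperator. A simplified, self-contained sketch of the same idea on a toy node class (Node, ReplicateSketch, and insertReplicates are illustrative names, not part of the Algebricks API):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class ReplicateSketch {
        static class Node {
            final String name;
            final List<Node> inputs = new ArrayList<>();
            Node(String name) { this.name = name; }
        }

        static void insertReplicates(List<Node> roots) {
            // Build the child -> parents mapping by walking down from the roots.
            Map<Node, List<Node>> childToParents = new HashMap<>();
            collect(roots, childToParents);
            for (Map.Entry<Node, List<Node>> e : childToParents.entrySet()) {
                List<Node> parents = e.getValue();
                if (parents.size() > 1) {
                    // One child with many parents: route each parent through a shared replicate node.
                    Node replicate = new Node("replicate");
                    replicate.inputs.add(e.getKey());
                    for (Node parent : parents) {
                        parent.inputs.set(parent.inputs.indexOf(e.getKey()), replicate);
                    }
                }
            }
        }

        private static void collect(List<Node> parents, Map<Node, List<Node>> map) {
            for (Node parent : parents) {
                for (Node child : parent.inputs) {
                    List<Node> ps = map.computeIfAbsent(child, k -> new ArrayList<>());
                    if (!ps.contains(parent)) {
                        ps.add(parent);
                    }
                }
                collect(parent.inputs, map);
            }
        }
    }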

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
deleted file mode 100644
index 7197975..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveLogicalPlanAndMetaData.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan;
-
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlanAndMetadata;
-import edu.uci.ics.hyracks.algebricks.core.algebra.metadata.IMetadataProvider;
-
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class HiveLogicalPlanAndMetaData implements ILogicalPlanAndMetadata {
-
-    IMetadataProvider metadata;
-    ILogicalPlan plan;
-
-    public HiveLogicalPlanAndMetaData(ILogicalPlan plan, IMetadataProvider metadata) {
-        this.plan = plan;
-        this.metadata = metadata;
-    }
-
-    @Override
-    public IMetadataProvider getMetadataProvider() {
-        return metadata;
-    }
-
-    @Override
-    public ILogicalPlan getPlan() {
-        return plan;
-    }
-
-    @Override
-    public AlgebricksPartitionConstraint getClusterLocations() {
-        // no partition constraint is computed here; callers must handle null
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
deleted file mode 100644
index 6f27c59..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/HiveOperatorAnnotations.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan;
-
-public class HiveOperatorAnnotations {
-
-    // hints
-    public static final String LOCAL_GROUP_BY = "LOCAL_GROUP_BY";
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
deleted file mode 100644
index fab3e34..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/ExtractVisitor.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan.visitor;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.hadoop.hive.ql.exec.ExtractOperator;
-
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
-import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-
-public class ExtractVisitor extends DefaultVisitor {
-
-    @Override
-    public Mutable<ILogicalOperator> visit(ExtractOperator operator,
-            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {
-        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));
-        operator.setSchema(operator.getParentOperators().get(0).getSchema());
-        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);
-        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);
-        return null;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
deleted file mode 100644
index 642285d..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/FilterVisitor.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan.visitor;
-
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.FilterDesc;
-
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
-import edu.uci.ics.hivesterix.runtime.jobgen.Schema;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
-
-public class FilterVisitor extends DefaultVisitor {
-
-    @Override
-    public Mutable<ILogicalOperator> visit(FilterOperator operator,
-            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {
-        Schema currentSchema = t.generateInputSchema(operator.getParentOperators().get(0));
-
-        FilterDesc desc = (FilterDesc) operator.getConf();
-        ExprNodeDesc predicate = desc.getPredicate();
-        t.rewriteExpression(predicate);
-
-        Mutable<ILogicalExpression> exprs = t.translateScalarFucntion(desc.getPredicate());
-        ILogicalOperator currentOperator = new SelectOperator(exprs, false, null);
-        currentOperator.getInputs().add(AlgebricksParentOperatorRef);
-
-        // propagate the schema from the upstream (parent) operator
-        operator.setSchema(operator.getParentOperators().get(0).getSchema());
-        List<LogicalVariable> latestOutputSchema = t.getVariablesFromSchema(currentSchema);
-        t.rewriteOperatorOutputSchema(latestOutputSchema, operator);
-        return new MutableObject<ILogicalOperator>(currentOperator);
-    }
-
-}
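
FilterVisitor, removed above, shows the common shape of these translation visitors: read the Hive operator's descriptor, rewrite its predicate expression, wrap it in the matching Algebricks operator (here a SelectOperator), attach the already-translated parent as the new operator's input, and carry the parent's schema through unchanged. A minimal sketch of that shape under invented types; SourceFilter, TargetSelect, and translate are hypothetical stand-ins, not the real Hive or Algebricks interfaces.

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical stand-in for a Hive-like filter node: a predicate plus the schema it inherits.
    class SourceFilter {
        final String predicate;
        final List<String> parentSchema;

        SourceFilter(String predicate, List<String> parentSchema) {
            this.predicate = predicate;
            this.parentSchema = parentSchema;
        }
    }

    // Hypothetical stand-in for an Algebricks-like select node.
    class TargetSelect {
        final String condition;
        final List<TargetSelect> inputs = new ArrayList<>();
        final List<String> outputSchema = new ArrayList<>();

        TargetSelect(String condition) {
            this.condition = condition;
        }
    }

    public class FilterTranslationSketch {

        // Translate a filter into a select: rewrite the predicate, wire in the
        // already-translated parent, and propagate the schema unchanged.
        static TargetSelect translate(SourceFilter filter, TargetSelect translatedParent) {
            String rewritten = filter.predicate.trim(); // stand-in for expression rewriting
            TargetSelect select = new TargetSelect(rewritten);
            select.inputs.add(translatedParent);
            select.outputSchema.addAll(filter.parentSchema); // a filter never changes the columns
            return select;
        }

        public static void main(String[] args) {
            TargetSelect parent = new TargetSelect("true"); // already-translated upstream operator
            parent.outputSchema.add("name");
            parent.outputSchema.add("age");

            SourceFilter hiveFilter = new SourceFilter("  age > 30  ", parent.outputSchema);
            TargetSelect select = translate(hiveFilter, parent);

            System.out.println(select.condition + " over columns " + select.outputSchema);
        }
    }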

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
----------------------------------------------------------------------
diff --git a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java b/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
deleted file mode 100644
index 82c619a..0000000
--- a/hivesterix/hivesterix-translator/src/main/java/edu/uci/ics/hivesterix/logical/plan/visitor/GroupByVisitor.java
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hivesterix.logical.plan.visitor;
-
-import java.lang.reflect.Field;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-
-import org.apache.commons.lang3.mutable.Mutable;
-import org.apache.commons.lang3.mutable.MutableObject;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
-import org.apache.hadoop.hive.ql.exec.GroupByOperator;
-import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
-import org.apache.hadoop.hive.ql.plan.AggregationDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
-import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.GroupByDesc;
-import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
-import org.apache.hadoop.hive.ql.plan.api.OperatorType;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer;
-import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
-
-import edu.uci.ics.hivesterix.common.config.ConfUtil;
-import edu.uci.ics.hivesterix.logical.plan.HiveOperatorAnnotations;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.DefaultVisitor;
-import edu.uci.ics.hivesterix.logical.plan.visitor.base.Translator;
-import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
-import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.ILogicalPlan;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.LogicalVariable;
-import edu.uci.ics.hyracks.algebricks.core.algebra.base.OperatorAnnotations;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AbstractFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.AggregateFunctionCallExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
-import edu.uci.ics.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
-import edu.uci.ics.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
-
-@SuppressWarnings({ "rawtypes", "unchecked" })
-public class GroupByVisitor extends DefaultVisitor {
-
-    private List<Mutable<ILogicalExpression>> AlgebricksAggs = new ArrayList<Mutable<ILogicalExpression>>();
-    private List<IFunctionInfo> localAggs = new ArrayList<IFunctionInfo>();
-    private boolean isDistinct = false;
-    private boolean gbyKeyNotRedKey = false;
-
-    @Override
-    public Mutable<ILogicalOperator> visit(GroupByOperator operator,
-            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) throws AlgebricksException {
-
-        // get descriptors
-        GroupByDesc desc = (GroupByDesc) operator.getConf();
-        GroupByDesc.Mode mode = desc.getMode();
-
-        List<ExprNodeDesc> keys = desc.getKeys();
-        List<AggregationDesc> aggregators = desc.getAggregators();
-
-        Operator child = operator.getChildOperators().get(0);
-
-        if (child.getType() == OperatorType.REDUCESINK) {
-            List<ExprNodeDesc> partKeys = ((ReduceSinkDesc) child.getConf()).getPartitionCols();
-            if (keys.size() != partKeys.size())
-                gbyKeyNotRedKey = true;
-        }
-
-        if (mode == GroupByDesc.Mode.PARTIAL1 || mode == GroupByDesc.Mode.HASH || mode == GroupByDesc.Mode.COMPLETE
-                || (aggregators.size() == 0 && isDistinct == false) || gbyKeyNotRedKey) {
-            AlgebricksAggs.clear();
-            // add an assign operator if the key is not a column expression
-            ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();
-            ILogicalOperator currentOperator = null;
-            ILogicalOperator assignOperator = t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);
-            if (assignOperator != null) {
-                currentOperator = assignOperator;
-                AlgebricksParentOperatorRef = new MutableObject<ILogicalOperator>(currentOperator);
-            }
-
-            // get key variable expression list
-            List<Mutable<ILogicalExpression>> keyExprs = new ArrayList<Mutable<ILogicalExpression>>();
-            for (LogicalVariable var : keyVariables) {
-                keyExprs.add(t.translateScalarFucntion(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, var
-                        .toString(), "", false)));
-            }
-
-            if (aggregators.size() == 0) {
-                List<Mutable<ILogicalExpression>> distinctExprs = new ArrayList<Mutable<ILogicalExpression>>();
-                for (LogicalVariable var : keyVariables) {
-                    Mutable<ILogicalExpression> varExpr = new MutableObject<ILogicalExpression>(
-                            new VariableReferenceExpression(var));
-                    distinctExprs.add(varExpr);
-                }
-                t.rewriteOperatorOutputSchema(keyVariables, operator);
-                isDistinct = true;
-                ILogicalOperator lop = new DistinctOperator(distinctExprs);
-                lop.getInputs().add(AlgebricksParentOperatorRef);
-                return new MutableObject<ILogicalOperator>(lop);
-            }
-
-            // get the pair<LogicalVariable, ILogicalExpression> list
-            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> keyParameters = new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>();
-            keyVariables.clear();
-            for (Mutable<ILogicalExpression> expr : keyExprs) {
-                LogicalVariable keyVar = t.getVariable(expr.getValue().toString(), TypeInfoFactory.unknownTypeInfo);
-                keyParameters.add(new Pair(keyVar, expr));
-                keyVariables.add(keyVar);
-            }
-
-            // get the parameters for the aggregator operator
-            ArrayList<LogicalVariable> aggVariables = new ArrayList<LogicalVariable>();
-            ArrayList<Mutable<ILogicalExpression>> aggExprs = new ArrayList<Mutable<ILogicalExpression>>();
-
-            // get the type of each aggregation function
-            HashMap<AggregationDesc, TypeInfo> aggToType = new HashMap<AggregationDesc, TypeInfo>();
-            List<ColumnInfo> columns = operator.getSchema().getSignature();
-            int offset = keys.size();
-            for (int i = offset; i < columns.size(); i++) {
-                aggToType.put(aggregators.get(i - offset), columns.get(i).getType());
-            }
-
-            localAggs.clear();
-            // rewrite parameter expressions for all aggregators
-            for (AggregationDesc aggregator : aggregators) {
-                for (ExprNodeDesc parameter : aggregator.getParameters()) {
-                    t.rewriteExpression(parameter);
-                }
-                Mutable<ILogicalExpression> aggExpr = t.translateAggregation(aggregator);
-                AbstractFunctionCallExpression localAggExpr = (AbstractFunctionCallExpression) aggExpr.getValue();
-                localAggs.add(localAggExpr.getFunctionInfo());
-
-                AggregationDesc logicalAgg = new AggregationDesc(aggregator.getGenericUDAFName(),
-                        aggregator.getGenericUDAFEvaluator(), aggregator.getParameters(), aggregator.getDistinct(),
-                        Mode.COMPLETE);
-                Mutable<ILogicalExpression> logicalAggExpr = t.translateAggregation(logicalAgg);
-
-                AlgebricksAggs.add(logicalAggExpr);
-                if (!gbyKeyNotRedKey)
-                    aggExprs.add(logicalAggExpr);
-                else
-                    aggExprs.add(aggExpr);
-
-                aggVariables.add(t.getVariable(aggregator.getExprString() + aggregator.getMode(),
-                        aggToType.get(aggregator)));
-            }
-
-            if (child.getType() != OperatorType.REDUCESINK)
-                gbyKeyNotRedKey = false;
-
-            // build the nested aggregation sub-plan: an AggregateOperator over a NestedTupleSourceOperator
-            AggregateOperator aggOperator = new AggregateOperator(aggVariables, aggExprs);
-            NestedTupleSourceOperator nestedTupleSource = new NestedTupleSourceOperator(
-                    new MutableObject<ILogicalOperator>());
-            aggOperator.getInputs().add(new MutableObject<ILogicalOperator>(nestedTupleSource));
-
-            List<Mutable<ILogicalOperator>> subRoots = new ArrayList<Mutable<ILogicalOperator>>();
-            subRoots.add(new MutableObject<ILogicalOperator>(aggOperator));
-            ILogicalPlan subPlan = new ALogicalPlanImpl(subRoots);
-            List<ILogicalPlan> subPlans = new ArrayList<ILogicalPlan>();
-            subPlans.add(subPlan);
-
-            // create the group by operator
-            currentOperator = new edu.uci.ics.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator(
-                    keyParameters, new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(), subPlans);
-            currentOperator.getInputs().add(AlgebricksParentOperatorRef);
-            nestedTupleSource.getDataSourceReference().setValue(currentOperator);
-
-            List<LogicalVariable> outputVariables = new ArrayList<LogicalVariable>();
-            outputVariables.addAll(keyVariables);
-            outputVariables.addAll(aggVariables);
-            t.rewriteOperatorOutputSchema(outputVariables, operator);
-
-            if (gbyKeyNotRedKey) {
-                currentOperator.getAnnotations().put(HiveOperatorAnnotations.LOCAL_GROUP_BY, Boolean.TRUE);
-            }
-
-            HiveConf conf = ConfUtil.getHiveConf();
-            Boolean extGby = conf.getBoolean("hive.algebricks.groupby.external", false);
-
-            if (extGby && isSerializable(aggregators)) {
-                currentOperator.getAnnotations().put(OperatorAnnotations.USE_EXTERNAL_GROUP_BY, Boolean.TRUE);
-            }
-            return new MutableObject<ILogicalOperator>(currentOperator);
-        } else {
-            isDistinct = false;
-            // rewrite parameter expressions for all aggregators
-            int i = 0;
-            for (AggregationDesc aggregator : aggregators) {
-                for (ExprNodeDesc parameter : aggregator.getParameters()) {
-                    t.rewriteExpression(parameter);
-                }
-                Mutable<ILogicalExpression> agg = t.translateAggregation(aggregator);
-                AggregateFunctionCallExpression originalAgg = (AggregateFunctionCallExpression) AlgebricksAggs.get(i)
-                        .getValue();
-                originalAgg.setStepOneAggregate(localAggs.get(i));
-                AggregateFunctionCallExpression currentAgg = (AggregateFunctionCallExpression) agg.getValue();
-                if (currentAgg.getFunctionInfo() != null) {
-                    originalAgg.setTwoStep(true);
-                    originalAgg.setStepTwoAggregate(currentAgg.getFunctionInfo());
-                }
-                i++;
-            }
-            return null;
-        }
-    }
-
-    @Override
-    public Mutable<ILogicalOperator> visit(ReduceSinkOperator operator,
-            Mutable<ILogicalOperator> AlgebricksParentOperatorRef, Translator t) {
-        Operator downStream = (Operator) operator.getChildOperators().get(0);
-        if (!(downStream instanceof GroupByOperator)) {
-            return null;
-        }
-
-        ReduceSinkDesc desc = (ReduceSinkDesc) operator.getConf();
-        List<ExprNodeDesc> keys = desc.getKeyCols();
-        List<ExprNodeDesc> values = desc.getValueCols();
-
-        // insert assign for keys
-        ArrayList<LogicalVariable> keyVariables = new ArrayList<LogicalVariable>();
-        t.getAssignOperator(AlgebricksParentOperatorRef, keys, keyVariables);
-
-        // insert assign for values
-        ArrayList<LogicalVariable> valueVariables = new ArrayList<LogicalVariable>();
-        t.getAssignOperator(AlgebricksParentOperatorRef, values, valueVariables);
-
-        ArrayList<LogicalVariable> columns = new ArrayList<LogicalVariable>();
-        columns.addAll(keyVariables);
-        columns.addAll(valueVariables);
-
-        t.rewriteOperatorOutputSchema(columns, operator);
-        return null;
-    }
-
-    private boolean isSerializable(List<AggregationDesc> descs) throws AlgebricksException {
-        try {
-            for (AggregationDesc desc : descs) {
-                GenericUDAFEvaluator udaf = desc.getGenericUDAFEvaluator();
-                AggregationBuffer buf = udaf.getNewAggregationBuffer();
-                Class<?> bufferClass = buf.getClass();
-                Field[] fields = bufferClass.getDeclaredFields();
-                for (Field field : fields) {
-                    field.setAccessible(true);
-                    String type = field.getType().toString();
-                    if (!(type.equals("int") || type.equals("long") || type.equals("float") || type.equals("double") || type
-                            .equals("boolean"))) {
-                        return false;
-                    }
-                }
-
-            }
-            return true;
-        } catch (Exception e) {
-            throw new AlgebricksException(e);
-        }
-    }
-
-}
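
GroupByVisitor, removed above, only annotates a group-by for the external (spillable) implementation when hive.algebricks.groupby.external is set and every aggregation buffer passes its isSerializable test: reflection over the buffer class must find nothing but int, long, float, double, or boolean fields. A self-contained sketch of that reflection check, using only the JDK; the sample buffer classes are hypothetical examples rather than Hive UDAF buffers.

    import java.lang.reflect.Field;
    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class BufferCheckSketch {

        private static final Set<String> ALLOWED =
                new HashSet<>(Arrays.asList("int", "long", "float", "double", "boolean"));

        // Returns true only if every declared field of the class is one of the
        // allowed primitive types, mirroring the check the deleted visitor applies
        // to each aggregation buffer before enabling the external group-by.
        static boolean hasOnlyPrimitiveFields(Class<?> bufferClass) {
            for (Field field : bufferClass.getDeclaredFields()) {
                if (!ALLOWED.contains(field.getType().toString())) {
                    return false;
                }
            }
            return true;
        }

        // Hypothetical aggregation buffers used only to exercise the check.
        static class CountBuffer { long count; }
        static class AvgBuffer { long count; double sum; }
        static class CollectBuffer { java.util.List<String> items; }

        public static void main(String[] args) {
            System.out.println(hasOnlyPrimitiveFields(CountBuffer.class));   // true
            System.out.println(hasOnlyPrimitiveFields(AvgBuffer.class));     // true
            System.out.println(hasOnlyPrimitiveFields(CollectBuffer.class)); // false
        }
    }

Presumably the restriction exists because an external group-by must write partial aggregation state out of memory, which is only straightforward when that state is a flat record of primitives.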

