asterixdb-commits mailing list archives

From: ima...@apache.org
Subject: [04/85] [abbrv] [partial] incubator-asterixdb-hyracks git commit: Move Pregelix and Hivesterix codebase to new repositories: 1. Move Pregelix codebase to https://github.com/pregelix/pregelix; 2. Move Hivesterix codebase to https://code.google.com/p/hives
Date: Fri, 24 Apr 2015 18:45:31 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
deleted file mode 100644
index aab647b..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
-import edu.uci.ics.pregelix.dataflow.std.util.CopyUpdateUtil;
-import edu.uci.ics.pregelix.dataflow.std.util.FunctionProxy;
-import edu.uci.ics.pregelix.dataflow.std.util.SearchKeyTupleReference;
-import edu.uci.ics.pregelix.dataflow.std.util.StorageType;
-import edu.uci.ics.pregelix.dataflow.std.util.UpdateBuffer;
-
-public class IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
-    private IndexDataflowHelper treeIndexOpHelper;
-    private FrameTupleAccessor accessor;
-
-    private ByteBuffer writeBuffer;
-    private FrameTupleAppender appender;
-
-    private ITreeIndex index;
-    private boolean isForward;
-    private RangePredicate rangePred;
-    private MultiComparator lowKeySearchCmp;
-    private IIndexCursor cursor;
-    protected IIndexAccessor indexAccessor;
-
-    private RecordDescriptor recDesc;
-    private PermutingFrameTupleReference lowKey;
-    private PermutingFrameTupleReference highKey;
-
-    private ITupleReference currentTopTuple;
-    private boolean match;
-
-    private final IFrameWriter[] writers;
-    private final FunctionProxy functionProxy;
-    private ArrayTupleBuilder cloneUpdateTb;
-    private UpdateBuffer updateBuffer;
-    private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
-    private final StorageType storageType;
-
-    public IndexNestedLoopSetUnionFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
-            int[] lowKeyFields, int[] highKeyFields, IUpdateFunctionFactory functionFactory,
-            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory,
-            IRecordDescriptorFactory inputRdFactory, int outputArity) throws HyracksDataException {
-        treeIndexOpHelper = (IndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
-                opDesc, ctx, partition);
-        if (treeIndexOpHelper instanceof TreeIndexDataflowHelper) {
-            storageType = StorageType.TreeIndex;
-        } else {
-            storageType = StorageType.LSMIndex;
-        }
-        this.isForward = isForward;
-        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-
-        if (lowKeyFields != null && lowKeyFields.length > 0) {
-            lowKey = new PermutingFrameTupleReference();
-            lowKey.setFieldPermutation(lowKeyFields);
-        }
-        if (highKeyFields != null && highKeyFields.length > 0) {
-            highKey = new PermutingFrameTupleReference();
-            highKey.setFieldPermutation(highKeyFields);
-        }
-
-        this.writers = new IFrameWriter[outputArity];
-        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
-                writers);
-        this.updateBuffer = new UpdateBuffer(ctx, 2);
-    }
-
-    protected void setCursor() {
-        cursor = indexAccessor.createSearchCursor(true);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        functionProxy.functionOpen();
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);
-
-        try {
-            treeIndexOpHelper.open();
-            index = (ITreeIndex) treeIndexOpHelper.getIndexInstance();
-
-            rangePred = new RangePredicate(null, null, true, true, null, null);
-            int lowKeySearchFields = index.getComparatorFactories().length;
-            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
-            for (int i = 0; i < lowKeySearchFields; i++) {
-                lowKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
-            }
-            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
-
-            writeBuffer = treeIndexOpHelper.getTaskContext().allocateFrame();
-            appender = new FrameTupleAppender(treeIndexOpHelper.getTaskContext().getFrameSize());
-            appender.reset(writeBuffer, true);
-
-            indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            setCursor();
-
-            /** set the search cursor */
-            rangePred.setLowKey(null, true);
-            rangePred.setHighKey(null, true);
-            cursor.reset();
-            indexAccessor.search(cursor, rangePred);
-
-            /** set up current top tuple */
-            if (cursor.hasNext()) {
-                cursor.next();
-                currentTopTuple = cursor.getTuple();
-                match = false;
-            }
-            cloneUpdateTb = new ArrayTupleBuilder(index.getFieldCount());
-            updateBuffer.setFieldCount(index.getFieldCount());
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        try {
-            for (int i = 0; i < tupleCount;) {
-                if (lowKey != null)
-                    lowKey.reset(accessor, i);
-                if (highKey != null)
-                    highKey.reset(accessor, i);
-                // TODO: currently use low key only, check what they mean
-                if (currentTopTuple != null) {
-                    int cmp = compare(lowKey, currentTopTuple);
-                    if (cmp == 0) {
-                        outputMatch(i);
-                        i++;
-                    } else if ((cmp > 0 && isForward) || (cmp < 0 && !isForward)) {
-                        moveTreeCursor();
-                    } else {
-                        writeLeftResults(accessor, i, null);
-                        i++;
-                    }
-                } else {
-                    writeLeftResults(accessor, i, null);
-                    i++;
-                }
-            }
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void outputMatch(int i) throws Exception {
-        writeLeftResults(accessor, i, currentTopTuple);
-        match = true;
-    }
-
-    private void moveTreeCursor() throws Exception {
-        if (!match) {
-            writeRightResults(currentTopTuple);
-        }
-        if (cursor.hasNext()) {
-            cursor.next();
-            currentTopTuple = cursor.getTuple();
-            match = false;
-        } else {
-            currentTopTuple = null;
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            while (currentTopTuple != null) {
-                moveTreeCursor();
-            }
-            try {
-                cursor.close();
-
-                //batch update
-                updateBuffer.updateIndex(indexAccessor);
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-            functionProxy.functionClose();
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            treeIndexOpHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        closeResource();
-        populateFailure();
-    }
-
-    private void populateFailure() throws HyracksDataException {
-        for (IFrameWriter writer : writers) {
-            writer.fail();
-        }
-    }
-
-    private void closeResource() throws HyracksDataException {
-        try {
-            cursor.close();
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            treeIndexOpHelper.close();
-        }
-    }
-
-    /** compare tuples */
-    private int compare(ITupleReference left, ITupleReference right) throws Exception {
-        return lowKeySearchCmp.compare(left, right);
-    }
-
-    /** write the right result */
-    private void writeRightResults(ITupleReference frameTuple) throws Exception {
-        functionProxy.functionCall(frameTuple, cloneUpdateTb, cursor);
-
-        //doing clone update
-        CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
-                rangePred, true, storageType);
-    }
-
-    /** write the left result */
-    private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex, ITupleReference frameTuple)
-            throws Exception {
-        functionProxy.functionCall(leftAccessor, tIndex, frameTuple, cloneUpdateTb, cursor);
-
-        //doing clone update
-        CopyUpdateUtil.copyUpdate(tempTupleReference, frameTuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
-                rangePred, true, storageType);
-    }
-
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-        writers[index] = writer;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java
deleted file mode 100644
index 89d5e3c..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/IndexNestedLoopSetUnionOperatorNodePushable.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-
-public class IndexNestedLoopSetUnionOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    private IndexDataflowHelper treeIndexOpHelper;
-    private FrameTupleAccessor accessor;
-
-    private ByteBuffer writeBuffer;
-    private FrameTupleAppender appender;
-    private ArrayTupleBuilder tb;
-    private DataOutput dos;
-
-    private ITreeIndex index;
-    private RangePredicate rangePred;
-    private MultiComparator lowKeySearchCmp;
-    private IIndexCursor cursor;
-    protected IIndexAccessor indexAccessor;
-
-    private RecordDescriptor recDesc;
-    private final RecordDescriptor inputRecDesc;
-
-    private PermutingFrameTupleReference lowKey;
-    private PermutingFrameTupleReference highKey;
-
-    private ITupleReference currentTopTuple;
-    private boolean match;
-
-    public IndexNestedLoopSetUnionOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
-            int[] lowKeyFields, int[] highKeyFields) {
-        inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-        treeIndexOpHelper = (IndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
-                opDesc, ctx, partition);
-        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-
-        if (lowKeyFields != null && lowKeyFields.length > 0) {
-            lowKey = new PermutingFrameTupleReference();
-            lowKey.setFieldPermutation(lowKeyFields);
-        }
-        if (highKeyFields != null && highKeyFields.length > 0) {
-            highKey = new PermutingFrameTupleReference();
-            highKey.setFieldPermutation(highKeyFields);
-        }
-    }
-
-    protected void setCursor() {
-        cursor = indexAccessor.createSearchCursor(false);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);
-
-        try {
-            treeIndexOpHelper.open();
-            index = (ITreeIndex) treeIndexOpHelper.getIndexInstance();
-            writer.open();
-
-            rangePred = new RangePredicate(null, null, true, true, null, null);
-            int lowKeySearchFields = index.getComparatorFactories().length;
-            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
-            for (int i = 0; i < lowKeySearchFields; i++) {
-                lowKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
-            }
-            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
-
-            writeBuffer = treeIndexOpHelper.getTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(index.getFieldCount());
-            dos = tb.getDataOutput();
-            appender = new FrameTupleAppender(treeIndexOpHelper.getTaskContext().getFrameSize());
-            appender.reset(writeBuffer, true);
-
-            indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            setCursor();
-
-            /** set the search cursor */
-            rangePred.setLowKey(null, true);
-            rangePred.setHighKey(null, true);
-            cursor.reset();
-            indexAccessor.search(cursor, rangePred);
-
-            /** set up current top tuple */
-            if (cursor.hasNext()) {
-                cursor.next();
-                currentTopTuple = cursor.getTuple();
-                match = false;
-            }
-
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        try {
-            for (int i = 0; i < tupleCount;) {
-                if (lowKey != null)
-                    lowKey.reset(accessor, i);
-                if (highKey != null)
-                    highKey.reset(accessor, i);
-                // TODO: currently use low key only, check what they mean
-                if (currentTopTuple != null) {
-                    int cmp = compare(lowKey, currentTopTuple);
-                    if (cmp == 0) {
-                        outputMatch(i);
-                        i++;
-                    } else if ((cmp > 0)) {
-                        moveTreeCursor();
-                    } else {
-                        writeLeftResults(accessor, i);
-                        i++;
-                    }
-                } else {
-                    writeLeftResults(accessor, i);
-                    i++;
-                }
-            }
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    private void outputMatch(int i) throws Exception {
-        writeLeftResults(accessor, i);
-        match = true;
-    }
-
-    private void moveTreeCursor() throws Exception {
-        if (!match) {
-            writeRightResults(currentTopTuple);
-        }
-        if (cursor.hasNext()) {
-            cursor.next();
-            currentTopTuple = cursor.getTuple();
-            match = false;
-        } else {
-            currentTopTuple = null;
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            while (currentTopTuple != null) {
-                moveTreeCursor();
-            }
-
-            if (appender.getTupleCount() > 0) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-            }
-            try {
-                cursor.close();
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        } finally{
-            treeIndexOpHelper.close();
-            writer.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        closeResource();
-        populateFailure();
-    }
-
-    private void closeResource() throws HyracksDataException {
-        try {
-            cursor.close();
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            treeIndexOpHelper.close();
-        }
-    }
-
-    private void populateFailure() throws HyracksDataException {
-        writer.fail();
-    }
-
-    /** compare tuples */
-    private int compare(ITupleReference left, ITupleReference right) throws Exception {
-        return lowKeySearchCmp.compare(left, right);
-    }
-
-    /** write the right result */
-    private void writeRightResults(ITupleReference frameTuple) throws Exception {
-        tb.reset();
-        for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-            dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-            tb.addFieldEndOffset();
-        }
-
-        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-            FrameUtils.flushFrame(writeBuffer, writer);
-            appender.reset(writeBuffer, true);
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                throw new IllegalStateException();
-            }
-        }
-    }
-
-    /** write the left result */
-    private void writeLeftResults(IFrameTupleAccessor leftAccessor, int tIndex) throws Exception {
-        tb.reset();
-        for (int i = 0; i < inputRecDesc.getFields().length; i++) {
-            int tupleStart = leftAccessor.getTupleStartOffset(tIndex);
-            int fieldStart = leftAccessor.getFieldStartOffset(tIndex, i);
-            int offset = leftAccessor.getFieldSlotsLength() + tupleStart + fieldStart;
-            int len = leftAccessor.getFieldEndOffset(tIndex, i) - fieldStart;
-            dos.write(leftAccessor.getBuffer().array(), offset, len);
-            tb.addFieldEndOffset();
-        }
-
-        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-            FrameUtils.flushFrame(writeBuffer, writer);
-            appender.reset(writeBuffer, true);
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                throw new IllegalStateException();
-            }
-        }
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java
deleted file mode 100644
index 4bdde16..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/ProjectOperatorDescriptor.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-public class ProjectOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private static final long serialVersionUID = 1L;
-    private final int[] projectFields;
-
-    public ProjectOperatorDescriptor(JobSpecification spec, RecordDescriptor rDesc, int projectFields[]) {
-        super(spec, 1, 1);
-        this.recordDescriptors[0] = rDesc;
-        this.projectFields = projectFields;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
-            throws HyracksDataException {
-        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
-            private final RecordDescriptor rd0 = recordDescProvider.getInputRecordDescriptor(getActivityId(), 0);
-            private final FrameTupleAccessor fta = new FrameTupleAccessor(ctx.getFrameSize(), rd0);
-            private final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-            private final ArrayTupleBuilder tb = new ArrayTupleBuilder(projectFields.length);
-            private final DataOutput dos = tb.getDataOutput();
-            private final ByteBuffer writeBuffer = ctx.allocateFrame();
-
-            @Override
-            public void close() throws HyracksDataException {
-                if (appender.getTupleCount() > 0)
-                    FrameUtils.flushFrame(writeBuffer, writer);
-                writer.close();
-            }
-
-            @Override
-            public void fail() throws HyracksDataException {
-                writer.fail();
-            }
-
-            @Override
-            public void nextFrame(ByteBuffer frame) throws HyracksDataException {
-                fta.reset(frame);
-                int tupleCount = fta.getTupleCount();
-                try {
-                    for (int tIndex = 0; tIndex < tupleCount; tIndex++) {
-                        tb.reset();
-                        for (int j = 0; j < projectFields.length; j++) {
-                            int fIndex = projectFields[j];
-                            int tupleStart = fta.getTupleStartOffset(tIndex);
-                            int fieldStart = fta.getFieldStartOffset(tIndex, fIndex);
-                            int offset = fta.getFieldSlotsLength() + tupleStart + fieldStart;
-                            int len = fta.getFieldEndOffset(tIndex, fIndex) - fieldStart;
-                            dos.write(fta.getBuffer().array(), offset, len);
-                            tb.addFieldEndOffset();
-                        }
-                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            FrameUtils.flushFrame(writeBuffer, writer);
-                            appender.reset(writeBuffer, true);
-                            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                                throw new IllegalStateException();
-                            }
-                        }
-                    }
-                } catch (IOException e) {
-                    throw new HyracksDataException(e);
-                }
-            }
-
-            @Override
-            public void open() throws HyracksDataException {
-                writer.open();
-                appender.reset(writeBuffer, true);
-            }
-
-        };
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java
deleted file mode 100644
index 1b76418..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/RuntimeHookOperatorDescriptor.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHook;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
-
-public class RuntimeHookOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-    private final IRuntimeHookFactory hookFactory;
-
-    public RuntimeHookOperatorDescriptor(JobSpecification spec, IRuntimeHookFactory hookFactory) {
-        super(spec, 1, 1);
-        this.hookFactory = hookFactory;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new AbstractUnaryInputUnaryOutputOperatorNodePushable() {
-            private IRuntimeHook hook = hookFactory.createRuntimeHook();
-
-            @Override
-            public void open() throws HyracksDataException {
-                hook.configure(ctx);
-                writer.open();
-            }
-
-            @Override
-            public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                FrameUtils.flushFrame(buffer, writer);
-            }
-
-            @Override
-            public void fail() throws HyracksDataException {
-                writer.fail();
-            }
-
-            @Override
-            public void close() throws HyracksDataException {
-                writer.close();
-            }
-
-        };
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java
deleted file mode 100644
index 160e89a..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorDescriptor.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.dataflow.std;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-
-public class TreeIndexBulkReLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    private final int[] fieldPermutation;
-    private final IStorageManagerInterface storageManager;
-    private final IIndexLifecycleManagerProvider lcManagerProvider;
-    private final IFileSplitProvider fileSplitProvider;
-    private final float fillFactor;
-
-    public TreeIndexBulkReLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
-            IIndexLifecycleManagerProvider lcManagerProvider, IFileSplitProvider fileSplitProvider,
-            ITypeTraits[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
-            int[] bloomFilterFields, float fillFactor, IIndexDataflowHelperFactory opHelperFactory) {
-        super(spec, 1, 0, null, storageManager, lcManagerProvider, fileSplitProvider, typeTraits, comparatorFactories,
-                bloomFilterFields, opHelperFactory, null, false, false,
-                null, new TransientLocalResourceFactoryProvider(), NoOpOperationCallbackFactory.INSTANCE, NoOpOperationCallbackFactory.INSTANCE);
-        this.fieldPermutation = fieldPermutation;
-
-        this.storageManager = storageManager;
-        this.lcManagerProvider = lcManagerProvider;
-        this.fileSplitProvider = fileSplitProvider;
-        this.fillFactor = fillFactor;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexBulkReLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
-                recordDescProvider, storageManager, lcManagerProvider, fileSplitProvider);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java
deleted file mode 100644
index 0056e8f..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeIndexBulkReLoadOperatorNodePushable.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoader;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-public class TreeIndexBulkReLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
-    private final float fillFactor;
-    private final IndexDataflowHelper treeIndexOpHelper;
-    private final IIndexOperatorDescriptor opDesc;
-    private final IRecordDescriptorProvider recordDescProvider;
-    private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-
-    private ITreeIndex index;
-    private FrameTupleAccessor accessor;
-    private IIndexBulkLoader bulkLoader;
-
-    public TreeIndexBulkReLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lcManagerProvider,
-            IFileSplitProvider fileSplitProvider) {
-        this.fillFactor = fillFactor;
-        treeIndexOpHelper = (IndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
-                opDesc, ctx, partition);
-        this.opDesc = opDesc;
-        this.recordDescProvider = recordDescProvider;
-        tuple.setFieldPermutation(fieldPermutation);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getTaskContext().getFrameSize(), recDesc);
-        treeIndexOpHelper.destroy();
-        treeIndexOpHelper.create();
-        treeIndexOpHelper.open();
-        try {
-            index = (ITreeIndex) treeIndexOpHelper.getIndexInstance();
-            bulkLoader = index.createBulkLoader(fillFactor, false, 100000, false);
-        } catch (Exception e) {
-            // cleanup in case of failure
-            treeIndexOpHelper.close();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            tuple.reset(accessor, i);
-            try {
-                bulkLoader.add(tuple);
-            } catch (IndexException e) {
-                throw new HyracksDataException(e);
-            }
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            bulkLoader.end();
-        } catch (IndexException e) {
-            throw new HyracksDataException(e);
-        } finally {
-            treeIndexOpHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        try {
-            bulkLoader.end();
-        } catch (IndexException e) {
-            treeIndexOpHelper.close();
-            throw new HyracksDataException(e);
-        } 
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorDescriptor.java
deleted file mode 100644
index 6cdac98..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorDescriptor.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.pregelix.dataflow.std;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
-
-public class TreeSearchFunctionUpdateOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
-
-    private static final long serialVersionUID = 1L;
-
-    protected boolean isForward;
-    protected int[] lowKeyFields; // fields in input tuple to be used as low
-                                  // keys
-    protected int[] highKeyFields; // fields in input tuple to be used as high
-    // keys
-    protected boolean lowKeyInclusive;
-    protected boolean highKeyInclusive;
-
-    private final IUpdateFunctionFactory functionFactory;
-    private final IRuntimeHookFactory preHookFactory;
-    private final IRuntimeHookFactory postHookFactory;
-    private final IRecordDescriptorFactory inputRdFactory;
-
-    private final int outputArity;
-
-    public TreeSearchFunctionUpdateOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexLifecycleManagerProvider lcManagerProvider,
-            IFileSplitProvider fileSplitProvider, ITypeTraits[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
-            boolean lowKeyInclusive, boolean highKeyInclusive, IIndexDataflowHelperFactory dataflowHelperFactory,
-            IRecordDescriptorFactory inputRdFactory, int outputArity, IUpdateFunctionFactory functionFactory,
-            IRuntimeHookFactory preHookFactory, IRuntimeHookFactory postHookFactory, RecordDescriptor... rDescs) {
-        super(spec, 1, outputArity, recDesc, storageManager, lcManagerProvider, fileSplitProvider, typeTraits,
-                comparatorFactories, null, dataflowHelperFactory, null, false,
-                false, null,
-                new TransientLocalResourceFactoryProvider(), NoOpOperationCallbackFactory.INSTANCE, NoOpOperationCallbackFactory.INSTANCE);
-        this.isForward = isForward;
-        this.lowKeyFields = lowKeyFields;
-        this.highKeyFields = highKeyFields;
-        this.lowKeyInclusive = lowKeyInclusive;
-        this.highKeyInclusive = highKeyInclusive;
-
-        this.functionFactory = functionFactory;
-        this.preHookFactory = preHookFactory;
-        this.postHookFactory = postHookFactory;
-        this.inputRdFactory = inputRdFactory;
-
-        for (int i = 0; i < rDescs.length; i++) {
-            this.recordDescriptors[i] = rDescs[i];
-        }
-
-        this.outputArity = outputArity;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
-        return new TreeSearchFunctionUpdateOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward,
-                lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive, functionFactory, preHookFactory,
-                postHookFactory, inputRdFactory, outputArity);
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorNodePushable.java
deleted file mode 100644
index da7288a..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/TreeSearchFunctionUpdateOperatorNodePushable.java
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDataflowHelper;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.PermutingFrameTupleReference;
-import edu.uci.ics.pregelix.dataflow.std.base.IRecordDescriptorFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IRuntimeHookFactory;
-import edu.uci.ics.pregelix.dataflow.std.base.IUpdateFunctionFactory;
-import edu.uci.ics.pregelix.dataflow.std.util.CopyUpdateUtil;
-import edu.uci.ics.pregelix.dataflow.std.util.FunctionProxy;
-import edu.uci.ics.pregelix.dataflow.std.util.SearchKeyTupleReference;
-import edu.uci.ics.pregelix.dataflow.std.util.StorageType;
-import edu.uci.ics.pregelix.dataflow.std.util.UpdateBuffer;
-
-public class TreeSearchFunctionUpdateOperatorNodePushable extends AbstractUnaryInputOperatorNodePushable {
-    protected IndexDataflowHelper treeIndexHelper;
-    protected FrameTupleAccessor accessor;
-
-    protected ByteBuffer writeBuffer;
-    protected FrameTupleAppender appender;
-    protected ArrayTupleBuilder tb;
-    protected DataOutput dos;
-
-    protected ITreeIndex index;
-    protected boolean isForward;
-    protected PermutingFrameTupleReference lowKey;
-    protected PermutingFrameTupleReference highKey;
-    protected boolean lowKeyInclusive;
-    protected boolean highKeyInclusive;
-    protected RangePredicate rangePred;
-    protected MultiComparator lowKeySearchCmp;
-    protected MultiComparator highKeySearchCmp;
-    protected IIndexCursor cursor;
-    protected ITreeIndexFrame cursorFrame;
-    protected IIndexAccessor indexAccessor;
-    protected int[] lowKeyFields;
-    protected int[] highKeyFields;
-
-    protected RecordDescriptor recDesc;
-
-    private final IFrameWriter[] writers;
-    private final FunctionProxy functionProxy;
-    private ArrayTupleBuilder cloneUpdateTb;
-    private final UpdateBuffer updateBuffer;
-    private final SearchKeyTupleReference tempTupleReference = new SearchKeyTupleReference();
-    private final StorageType storageType;
-
-    public TreeSearchFunctionUpdateOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward,
-            int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive,
-            IUpdateFunctionFactory functionFactory, IRuntimeHookFactory preHookFactory,
-            IRuntimeHookFactory postHookFactory, IRecordDescriptorFactory inputRdFactory, int outputArity)
-            throws HyracksDataException {
-        treeIndexHelper = (IndexDataflowHelper) opDesc.getIndexDataflowHelperFactory().createIndexDataflowHelper(
-                opDesc, ctx, partition);
-        if (treeIndexHelper instanceof TreeIndexDataflowHelper) {
-            storageType = StorageType.TreeIndex;
-        } else {
-            storageType = StorageType.LSMIndex;
-        }
-        this.isForward = isForward;
-        this.lowKeyInclusive = lowKeyInclusive;
-        this.highKeyInclusive = highKeyInclusive;
-        this.lowKeyFields = lowKeyFields;
-        this.highKeyFields = highKeyFields;
-        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getActivityId(), 0);
-        if (lowKeyFields != null && lowKeyFields.length > 0) {
-            lowKey = new PermutingFrameTupleReference();
-            lowKey.setFieldPermutation(lowKeyFields);
-        }
-        if (highKeyFields != null && highKeyFields.length > 0) {
-            highKey = new PermutingFrameTupleReference();
-            highKey.setFieldPermutation(highKeyFields);
-        }
-
-        this.writers = new IFrameWriter[outputArity];
-        this.functionProxy = new FunctionProxy(ctx, functionFactory, preHookFactory, postHookFactory, inputRdFactory,
-                writers);
-        this.updateBuffer = new UpdateBuffer(ctx, 2);
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        /**
-         * open the function
-         */
-        functionProxy.functionOpen();
-        accessor = new FrameTupleAccessor(treeIndexHelper.getTaskContext().getFrameSize(), recDesc);
-
-        try {
-            treeIndexHelper.open();
-            index = (ITreeIndex) treeIndexHelper.getIndexInstance();
-            cursorFrame = index.getLeafFrameFactory().createFrame();
-
-            // Construct range predicate.
-            int lowKeySearchFields = index.getComparatorFactories().length;
-            int highKeySearchFields = index.getComparatorFactories().length;
-            if (lowKey != null)
-                lowKeySearchFields = lowKey.getFieldCount();
-            if (highKey != null)
-                highKeySearchFields = highKey.getFieldCount();
-
-            IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
-            for (int i = 0; i < lowKeySearchFields; i++) {
-                lowKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
-            }
-            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
-
-            if (lowKeySearchFields == highKeySearchFields) {
-                highKeySearchCmp = lowKeySearchCmp;
-            } else {
-                IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
-                for (int i = 0; i < highKeySearchFields; i++) {
-                    highKeySearchComparators[i] = index.getComparatorFactories()[i].createBinaryComparator();
-                }
-                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
-            }
-
-            rangePred = new RangePredicate(null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
-                    highKeySearchCmp);
-
-            writeBuffer = treeIndexHelper.getTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(index.getFieldCount());
-            dos = tb.getDataOutput();
-            appender = new FrameTupleAppender(treeIndexHelper.getTaskContext().getFrameSize());
-            appender.reset(writeBuffer, true);
-            indexAccessor = index.createAccessor(NoOpOperationCallback.INSTANCE, NoOpOperationCallback.INSTANCE);
-            setCursor();
-
-            cloneUpdateTb = new ArrayTupleBuilder(index.getFieldCount());
-            updateBuffer.setFieldCount(index.getFieldCount());
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    protected void setCursor() {
-        cursor = indexAccessor.createSearchCursor(true);
-    }
-
-    protected void writeSearchResults() throws Exception {
-        while (cursor.hasNext()) {
-            cursor.next();
-            ITupleReference tuple = cursor.getTuple();
-            functionProxy.functionCall(tuple, cloneUpdateTb, cursor);
-
-            //doing clone update
-            CopyUpdateUtil.copyUpdate(tempTupleReference, tuple, updateBuffer, cloneUpdateTb, indexAccessor, cursor,
-                    rangePred, true, storageType);
-        }
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        try {
-            for (int i = 0; i < tupleCount; i++) {
-                if (lowKey != null) {
-                    lowKey.reset(accessor, i);
-                }
-                if (highKey != null) {
-                    highKey.reset(accessor, i);
-                }
-                rangePred.setLowKey(lowKey, lowKeyInclusive);
-                rangePred.setHighKey(highKey, highKeyInclusive);
-                cursor.reset();
-                indexAccessor.search(cursor, rangePred);
-                writeSearchResults();
-            }
-        } catch (Exception e) {
-            closeResource();
-            throw new HyracksDataException(e);
-        }
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            try {
-                cursor.close();
-                //batch update
-                updateBuffer.updateIndex(indexAccessor);
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-
-            /**
-             * close the update function
-             */
-            functionProxy.functionClose();
-        } finally {
-            treeIndexHelper.close();
-        }
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        closeResource();
-        populateFailure();
-    }
-
-    private void populateFailure() throws HyracksDataException {
-        for (IFrameWriter writer : writers) {
-            writer.fail();
-        }
-    }
-
-    private void closeResource() throws HyracksDataException {
-        try {
-            cursor.close();
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        } finally {
-            treeIndexHelper.close();
-        }
-    }
-
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-        writers[index] = writer;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/collectors/SortMergeFrameReader.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/collectors/SortMergeFrameReader.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/collectors/SortMergeFrameReader.java
deleted file mode 100644
index 31f4182..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/collectors/SortMergeFrameReader.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.collectors;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.comm.IFrameReader;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.std.collectors.IPartitionBatchManager;
-import edu.uci.ics.pregelix.dataflow.std.sort.RunMergingFrameReader;
-
-public class SortMergeFrameReader implements IFrameReader {
-    private IHyracksTaskContext ctx;
-    private final int maxConcurrentMerges;
-    private final int nSenders;
-    private final int[] sortFields;
-
-    private final RecordDescriptor recordDescriptor;
-    private final IPartitionBatchManager pbm;
-
-    private RunMergingFrameReader merger;
-
-    public SortMergeFrameReader(IHyracksTaskContext ctx, int maxConcurrentMerges, int nSenders, int[] sortFields,
-            RecordDescriptor recordDescriptor, IPartitionBatchManager pbm) {
-        this.ctx = ctx;
-        this.maxConcurrentMerges = maxConcurrentMerges;
-        this.nSenders = nSenders;
-        this.sortFields = sortFields;
-        this.recordDescriptor = recordDescriptor;
-        this.pbm = pbm;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        if (maxConcurrentMerges >= nSenders) {
-            List<ByteBuffer> inFrames = new ArrayList<ByteBuffer>();
-            for (int i = 0; i < nSenders; ++i) {
-                inFrames.add(ByteBuffer.allocate(ctx.getFrameSize()));
-            }
-            List<IFrameReader> batch = new ArrayList<IFrameReader>();
-            pbm.getNextBatch(batch, nSenders);
-            merger = new RunMergingFrameReader(ctx, batch.toArray(new IFrameReader[nSenders]), inFrames, sortFields,
-                    recordDescriptor);
-        } else {
-            // multi level merge.
-            throw new HyracksDataException("Not yet supported");
-        }
-        merger.open();
-    }
-
-    @Override
-    public boolean nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        buffer.position(buffer.capacity());
-        buffer.limit(buffer.capacity());
-        return merger.nextFrame(buffer);
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        merger.close();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
deleted file mode 100644
index 2d820bc..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/connectors/MToNPartitioningMergingConnectorDescriptor.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.connectors;
-
-import java.util.BitSet;
-
-import edu.uci.ics.hyracks.api.comm.IFrameReader;
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.comm.IPartitionCollector;
-import edu.uci.ics.hyracks.api.comm.IPartitionWriterFactory;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.ITuplePartitionComputerFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IConnectorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractMToNConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.collectors.IPartitionBatchManager;
-import edu.uci.ics.hyracks.dataflow.std.collectors.NonDeterministicPartitionBatchManager;
-import edu.uci.ics.hyracks.dataflow.std.collectors.PartitionCollector;
-import edu.uci.ics.hyracks.dataflow.std.connectors.PartitionDataWriter;
-import edu.uci.ics.pregelix.dataflow.std.collectors.SortMergeFrameReader;
-
-public class MToNPartitioningMergingConnectorDescriptor extends AbstractMToNConnectorDescriptor {
-    private static final long serialVersionUID = 1L;
-
-    private final ITuplePartitionComputerFactory tpcf;
-    private final int[] sortFields;
-
-    public MToNPartitioningMergingConnectorDescriptor(IConnectorDescriptorRegistry spec,
-            ITuplePartitionComputerFactory tpcf, int[] sortFields) {
-        this(spec, tpcf, sortFields, false);
-    }
-
-    public MToNPartitioningMergingConnectorDescriptor(IConnectorDescriptorRegistry spec,
-            ITuplePartitionComputerFactory tpcf, int[] sortFields, boolean stable) {
-        super(spec);
-        this.tpcf = tpcf;
-        this.sortFields = sortFields;
-    }
-
-    @Override
-    public IFrameWriter createPartitioner(IHyracksTaskContext ctx, RecordDescriptor recordDesc,
-            IPartitionWriterFactory edwFactory, int index, int nProducerPartitions, int nConsumerPartitions)
-            throws HyracksDataException {
-        final PartitionDataWriter hashWriter = new PartitionDataWriter(ctx, nConsumerPartitions, edwFactory,
-                recordDesc, tpcf.createPartitioner());
-        return hashWriter;
-    }
-
-    @Override
-    public IPartitionCollector createPartitionCollector(IHyracksTaskContext ctx, RecordDescriptor recordDesc,
-            int index, int nProducerPartitions, int nConsumerPartitions) throws HyracksDataException {
-        IPartitionBatchManager pbm = new NonDeterministicPartitionBatchManager(nProducerPartitions);
-        IFrameReader sortMergeFrameReader = new SortMergeFrameReader(ctx, nProducerPartitions, nProducerPartitions,
-                sortFields, recordDesc, pbm);
-        BitSet expectedPartitions = new BitSet();
-        expectedPartitions.set(0, nProducerPartitions);
-        return new PartitionCollector(ctx, getConnectorId(), index, expectedPartitions, sortMergeFrameReader, pbm);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorDescriptor.java
deleted file mode 100644
index ed1141e..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorDescriptor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.group;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-
-public class ClusteredGroupOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-    private final int[] groupFields;
-    private final IBinaryComparatorFactory[] comparatorFactories;
-    private final IClusteredAggregatorDescriptorFactory aggregatorFactory;
-
-    private static final long serialVersionUID = 1L;
-
-    public ClusteredGroupOperatorDescriptor(IOperatorDescriptorRegistry spec, int[] groupFields,
-            IBinaryComparatorFactory[] comparatorFactories, IClusteredAggregatorDescriptorFactory aggregatorFactory,
-            RecordDescriptor recordDescriptor) {
-        super(spec, 1, 1);
-        this.groupFields = groupFields;
-        this.comparatorFactories = comparatorFactories;
-        this.aggregatorFactory = aggregatorFactory;
-        recordDescriptors[0] = recordDescriptor;
-    }
-
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx,
-            final IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions)
-            throws HyracksDataException {
-        return new ClusteredGroupOperatorNodePushable(ctx, groupFields, comparatorFactories, aggregatorFactory,
-                recordDescProvider.getInputRecordDescriptor(getActivityId(), 0), recordDescriptors[0]);
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorNodePushable.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorNodePushable.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorNodePushable.java
deleted file mode 100644
index a86f28d..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupOperatorNodePushable.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.group;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-
-class ClusteredGroupOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    private final IHyracksTaskContext ctx;
-    private final int[] groupFields;
-    private final IBinaryComparatorFactory[] comparatorFactories;
-    private final IClusteredAggregatorDescriptorFactory aggregatorFactory;
-    private final RecordDescriptor inRecordDescriptor;
-    private final RecordDescriptor outRecordDescriptor;
-    private ClusteredGroupWriter pgw;
-
-    ClusteredGroupOperatorNodePushable(IHyracksTaskContext ctx, int[] groupFields,
-            IBinaryComparatorFactory[] comparatorFactories, IClusteredAggregatorDescriptorFactory aggregatorFactory,
-            RecordDescriptor inRecordDescriptor, RecordDescriptor outRecordDescriptor) {
-        this.ctx = ctx;
-        this.groupFields = groupFields;
-        this.comparatorFactories = comparatorFactories;
-        this.aggregatorFactory = aggregatorFactory;
-        this.inRecordDescriptor = inRecordDescriptor;
-        this.outRecordDescriptor = outRecordDescriptor;
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        final IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
-        for (int i = 0; i < comparatorFactories.length; ++i) {
-            comparators[i] = comparatorFactories[i].createBinaryComparator();
-        }
-        final ByteBuffer copyFrame = ctx.allocateFrame();
-        final FrameTupleAccessor copyFrameAccessor = new FrameTupleAccessor(ctx.getFrameSize(), inRecordDescriptor);
-        copyFrameAccessor.reset(copyFrame);
-        ByteBuffer outFrame = ctx.allocateFrame();
-        final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        appender.reset(outFrame, true);
-        pgw = new ClusteredGroupWriter(ctx, groupFields, comparators, aggregatorFactory, inRecordDescriptor,
-                outRecordDescriptor, writer);
-        pgw.open();
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        pgw.nextFrame(buffer);
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        pgw.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        pgw.close();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupWriter.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupWriter.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupWriter.java
deleted file mode 100644
index 605ae19..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/ClusteredGroupWriter.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.group;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.group.AggregateState;
-
-public class ClusteredGroupWriter implements IFrameWriter {
-    private final int[] groupFields;
-    private final IBinaryComparator[] comparators;
-    private final IAggregatorDescriptor aggregator;
-    private final AggregateState aggregateState;
-    private final IFrameWriter writer;
-    private final ByteBuffer copyFrame;
-    private final FrameTupleAccessor inFrameAccessor;
-    private final FrameTupleAccessor copyFrameAccessor;
-
-    private final ByteBuffer outFrame;
-    private final FrameTupleAppender appender;
-    private boolean first;
-
-    public ClusteredGroupWriter(IHyracksTaskContext ctx, int[] groupFields, IBinaryComparator[] comparators,
-            IClusteredAggregatorDescriptorFactory aggregatorFactory, RecordDescriptor inRecordDesc,
-            RecordDescriptor outRecordDesc, IFrameWriter writer) throws HyracksDataException {
-        this.groupFields = groupFields;
-        this.comparators = comparators;
-        this.writer = writer;
-        copyFrame = ctx.allocateFrame();
-        inFrameAccessor = new FrameTupleAccessor(ctx.getFrameSize(), inRecordDesc);
-        copyFrameAccessor = new FrameTupleAccessor(ctx.getFrameSize(), inRecordDesc);
-        copyFrameAccessor.reset(copyFrame);
-
-        outFrame = ctx.allocateFrame();
-        appender = new FrameTupleAppender(ctx.getFrameSize(), outRecordDesc.getFields().length);
-        appender.reset(outFrame, true);
-
-        this.aggregator = aggregatorFactory.createAggregator(ctx, inRecordDesc, outRecordDesc, groupFields,
-                groupFields, writer, outFrame, appender);
-        this.aggregateState = aggregator.createAggregateStates();
-    }
-
-    @Override
-    public void open() throws HyracksDataException {
-        writer.open();
-        first = true;
-    }
-
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        inFrameAccessor.reset(buffer);
-        int nTuples = inFrameAccessor.getTupleCount();
-        for (int i = 0; i < nTuples; ++i) {
-            if (first) {
-                aggregator.init(inFrameAccessor, i, aggregateState);
-                first = false;
-            } else {
-                if (i == 0) {
-                    switchGroupIfRequired(copyFrameAccessor, copyFrameAccessor.getTupleCount() - 1, inFrameAccessor, i);
-                } else {
-                    switchGroupIfRequired(inFrameAccessor, i - 1, inFrameAccessor, i);
-                }
-
-            }
-        }
-        FrameUtils.copy(buffer, copyFrame);
-    }
-
-    private void switchGroupIfRequired(FrameTupleAccessor prevTupleAccessor, int prevTupleIndex,
-            FrameTupleAccessor currTupleAccessor, int currTupleIndex) throws HyracksDataException {
-        if (!sameGroup(prevTupleAccessor, prevTupleIndex, currTupleAccessor, currTupleIndex)) {
-            writeOutput(prevTupleAccessor, prevTupleIndex);
-            aggregator.init(currTupleAccessor, currTupleIndex, aggregateState);
-        } else {
-            aggregator.aggregate(currTupleAccessor, currTupleIndex, aggregateState);
-        }
-    }
-
-    private void writeOutput(final FrameTupleAccessor lastTupleAccessor, int lastTupleIndex)
-            throws HyracksDataException {
-        if (!aggregator.outputFinalResult(lastTupleAccessor, lastTupleIndex, aggregateState, appender)) {
-            FrameUtils.flushFrame(outFrame, writer);
-            appender.reset(outFrame, true);
-            if (!aggregator.outputFinalResult(lastTupleAccessor, lastTupleIndex, aggregateState, appender)) {
-                throw new HyracksDataException("The output record cannot fit into a frame of size "
-                        + outFrame.array().length);
-            }
-        }
-
-    }
-
-    private boolean sameGroup(FrameTupleAccessor a1, int t1Idx, FrameTupleAccessor a2, int t2Idx) {
-        for (int i = 0; i < comparators.length; ++i) {
-            int fIdx = groupFields[i];
-            int s1 = a1.getTupleStartOffset(t1Idx) + a1.getFieldSlotsLength() + a1.getFieldStartOffset(t1Idx, fIdx);
-            int l1 = a1.getFieldLength(t1Idx, fIdx);
-            int s2 = a2.getTupleStartOffset(t2Idx) + a2.getFieldSlotsLength() + a2.getFieldStartOffset(t2Idx, fIdx);
-            int l2 = a2.getFieldLength(t2Idx, fIdx);
-            if (comparators[i].compare(a1.getBuffer().array(), s1, l1, a2.getBuffer().array(), s2, l2) != 0) {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
-
-    @Override
-    public void close() throws HyracksDataException {
-        if (!first) {
-            if (copyFrameAccessor.getTupleCount() > 0) {
-                writeOutput(copyFrameAccessor, copyFrameAccessor.getTupleCount() - 1);
-                if (appender.getTupleCount() > 0) {
-                    FrameUtils.flushFrame(outFrame, writer);
-                }
-            }
-        }
-        aggregateState.close();
-        writer.close();
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/ffc967fd/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/IAggregatorDescriptor.java
----------------------------------------------------------------------
diff --git a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/IAggregatorDescriptor.java b/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/IAggregatorDescriptor.java
deleted file mode 100644
index a46ae65..0000000
--- a/pregelix/pregelix-dataflow-std/src/main/java/edu/uci/ics/pregelix/dataflow/std/group/IAggregatorDescriptor.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.pregelix.dataflow.std.group;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.std.group.AggregateState;
-
-public interface IAggregatorDescriptor {
-
-    /**
-     * Create an aggregate state.
-     * 
-     * @return a new, empty aggregate state
-     */
-    public AggregateState createAggregateStates();
-
-    /**
-     * Initialize the state based on the input tuple.
-     * 
-     * @param accessor
-     *            The accessor of the frame containing the input tuple.
-     * @param tIndex
-     *            The index of the input tuple in its frame.
-     * @param state
-     *            The state to be initialized. It may be backed by a frame or
-     *            maintained as a java object.
-     * @throws HyracksDataException
-     */
-    public void init(IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException;
-
-    /**
-     * Reset the aggregator. The corresponding aggregate state should be reset
-     * too. Note that the frame is not an input argument here, since it can be
-     * reset outside of the aggregator (by simply resetting the starting index
-     * of the buffer).
-     */
-    public void reset();
-
-    /**
-     * Aggregate the value. The aggregate state should be updated accordingly.
-     * 
-     * @param accessor
-     *            The accessor of the frame containing the input tuple.
-     * @param tIndex
-     *            The index of the input tuple in its frame.
-     * @param state
-     *            The aggregate state. It may be frame-based or maintained as a
-     *            java object.
-     * @throws HyracksDataException
-     */
-    public void aggregate(IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException;
-
-    /**
-     * Output the partial aggregation result.
-     * 
-     * @param accessor
-     *            The accessor of the frame containing the last tuple of the group.
-     * @param tIndex
-     *            The index of that tuple in its frame.
-     * @param state
-     *            The aggregation state.
-     * @param appender
-     *            The appender for the output frame.
-     * @return true if the partial result has been written; false if it is left
-     *         for the group writer to write the grouped tuple
-     * @throws HyracksDataException
-     */
-    public boolean outputPartialResult(IFrameTupleAccessor accessor, int tIndex, AggregateState state,
-            FrameTupleAppender appender) throws HyracksDataException;
-
-    /**
-     * Output the final aggregation result.
-     * 
-     * @param accessor
-     *            The accessor of the frame containing the last tuple of the group.
-     * @param tIndex
-     *            The index of that tuple in its frame.
-     * @param state
-     *            The aggregation state.
-     * @param appender
-     *            The appender for the output frame.
-     * @return true if the group is already written; false if it is left for the
-     *         group writer to write the grouped tuple
-     * @throws HyracksDataException
-     */
-    public boolean outputFinalResult(IFrameTupleAccessor accessor, int tIndex, AggregateState state,
-            FrameTupleAppender appender) throws HyracksDataException;
-
-    public void close();
-
-}
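
As an illustration of the aggregation contract above (not part of this commit), here is a minimal sketch of an IAggregatorDescriptor implementation: a hypothetical per-group tuple counter that keeps its running count as a java object inside AggregateState and appends a single-field result tuple. The class name CountAggregatorDescriptor is invented for illustration, and the sketch assumes Hyracks' AggregateState exposes its java-object payload through its public 'state' field; the ArrayTupleBuilder and FrameTupleAppender calls are the same ones used elsewhere in these files.

package edu.uci.ics.pregelix.dataflow.std.group;

import java.io.IOException;

import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.std.group.AggregateState;

public class CountAggregatorDescriptor implements IAggregatorDescriptor {

    private final ArrayTupleBuilder tb = new ArrayTupleBuilder(1);

    @Override
    public AggregateState createAggregateStates() {
        // java-object-based state: a mutable count boxed in a long[1]
        return new AggregateState(new long[1]);
    }

    @Override
    public void init(IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
        ((long[]) state.state)[0] = 1;
    }

    @Override
    public void reset() {
        // no frame-based state to reset in this sketch
    }

    @Override
    public void aggregate(IFrameTupleAccessor accessor, int tIndex, AggregateState state) throws HyracksDataException {
        ((long[]) state.state)[0]++;
    }

    @Override
    public boolean outputPartialResult(IFrameTupleAccessor accessor, int tIndex, AggregateState state,
            FrameTupleAppender appender) throws HyracksDataException {
        return outputFinalResult(accessor, tIndex, state, appender);
    }

    @Override
    public boolean outputFinalResult(IFrameTupleAccessor accessor, int tIndex, AggregateState state,
            FrameTupleAppender appender) throws HyracksDataException {
        // Append the count as a one-field tuple. Returning false signals that the
        // appender is full; ClusteredGroupWriter then flushes the frame and retries.
        tb.reset();
        try {
            tb.getDataOutput().writeLong(((long[]) state.state)[0]);
            tb.addFieldEndOffset();
        } catch (IOException e) {
            throw new HyracksDataException(e);
        }
        return appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
    }

    @Override
    public void close() {
        // nothing to release
    }
}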

