asterixdb-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jianf...@apache.org
Subject [03/14] incubator-asterixdb-hyracks git commit: VariableSizeFrame(VSizeFrame) support for Hyracks.
Date Thu, 18 Jun 2015 04:22:20 GMT
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinHeapTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinHeapTest.java b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinHeapTest.java
new file mode 100644
index 0000000..bcdabd1
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinHeapTest.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.structures;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+
+// Unit tests for MinHeap: initial state, insertion within and beyond the
+// configured capacity, and replaceMin semantics.
+// NOTE(review): the parent class name "AbstracHeapTest" looks like a typo for
+// "AbstractHeapTest" -- confirm against that class's own file before renaming.
+public class MinHeapTest extends AbstracHeapTest{
+
+    @Test
+    public void testInitialMinHeap() {
+        // A freshly constructed heap must report empty with zero entries.
+        int capacity = 10;
+        MinHeap minHeap = new MinHeap(new IntFactory(), capacity);
+        assertTrue(minHeap.isEmpty());
+        assertEquals(0, minHeap.getNumEntries());
+    }
+
+    @Test
+    public void testInsertSmallAmountElements() {
+        int capacity = 10;
+        MinHeap minHeap = new MinHeap(new IntFactory(), capacity);
+        // Insert exactly `capacity` descending values (capacity .. 1).
+        for (int i = 0; i < capacity; i++) {
+            minHeap.insert(new Int(capacity - i));
+        }
+        assertEquals(capacity, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+
+        // NOTE(review): the entry count returning to `capacity` after the
+        // refill below implies assertGetMinHeapIsSorted() drains the heap --
+        // confirm in AbstracHeapTest.
+        assertGetMinHeapIsSorted(minHeap);
+
+        // Refill with random values and verify ascending drain order again.
+        for (int i = 0; i < capacity; i++) {
+            minHeap.insert(new Int(random.nextInt()));
+        }
+        assertEquals(capacity, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+        assertGetMinHeapIsSorted(minHeap);
+    }
+
+    @Test
+    public void testInsertLargerThanCapacityElements() {
+        int capacity = 10;
+        MinHeap minHeap = new MinHeap(new IntFactory(), capacity);
+        for (int i = 0; i < capacity; i++) {
+            minHeap.insert(new Int(capacity - i));
+        }
+        assertEquals(capacity, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+        assertGetMinHeapIsSorted(minHeap);
+
+        // Exercise growth past the initial capacity: insert 10x the
+        // configured size (the heap was drained by the assertion above).
+        for (int i = 0; i < capacity * 10; i++) {
+            minHeap.insert(new Int(random.nextInt()));
+        }
+        assertEquals(capacity * 10, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+        assertGetMinHeapIsSorted(minHeap);
+
+    }
+
+    @Test
+    public void testReplaceMin() {
+        int capacity = 10;
+        MinHeap minHeap = new MinHeap(new IntFactory(), capacity);
+        // Seed with 0 .. capacity-1, then replace the minimum `capacity`
+        // times with capacity .. 2*capacity-1; the heap should then hold
+        // exactly that upper range at unchanged size.
+        for (int i = 0; i < capacity; i++) {
+            minHeap.insert(new Int(i));
+        }
+        assertEquals(capacity, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+
+        for (int i = capacity; i < capacity * 2; i++) {
+            minHeap.replaceMin(new Int(i));
+        }
+        assertEquals(capacity, minHeap.getNumEntries());
+        assertFalse(minHeap.isEmpty());
+
+        // Drain: peekMin must agree with getMin, and values must come out in
+        // ascending order starting at `capacity`.
+        Int minI = new Int();
+        Int peekI = new Int();
+        int i = 0;
+        while (!minHeap.isEmpty()) {
+            minHeap.peekMin(peekI);
+            minHeap.getMin(minI);
+            assertTrue(peekI.compareTo(minI) == 0);
+            assertEquals(i++ + capacity, minI.i);
+        }
+    }
+
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinMaxHeapTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinMaxHeapTest.java b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinMaxHeapTest.java
new file mode 100644
index 0000000..1f16e08
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/structures/MinMaxHeapTest.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.structures;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+
+// Unit tests for MinMaxHeap: initial state, bulk insertion, and the
+// replaceMin / replaceMax operations, draining from both ends.
+public class MinMaxHeapTest extends AbstracHeapTest {
+
+    @Test
+    public void testInitialMinMaxHeap() {
+        // A freshly constructed heap must report empty with zero entries.
+        // NOTE(review): local is named `minHeap` but holds a MinMaxHeap --
+        // consider renaming for clarity.
+        int capacity = 10;
+        MinMaxHeap minHeap = new MinMaxHeap(new IntFactory(), capacity);
+        assertTrue(minHeap.isEmpty());
+        assertEquals(0, minHeap.getNumEntries());
+    }
+
+    @Test
+    public void testInsertElements() {
+        int capacity = 10;
+        MinMaxHeap minMaxHeap = new MinMaxHeap(new IntFactory(), capacity);
+        // Insert 10x the configured capacity to exercise growth.
+        for (int i = 0; i < capacity * 10; i++) {
+            minMaxHeap.insert(new Int(random.nextInt()));
+        }
+        assertEquals(capacity * 10, minMaxHeap.getNumEntries());
+        assertFalse(minMaxHeap.isEmpty());
+        // NOTE(review): the entry count returning to capacity*10 after the
+        // refill below implies this assertion drains the heap -- confirm in
+        // AbstracHeapTest.
+        assertGetMinHeapIsSorted(minMaxHeap);
+
+        // Refill and verify the max-side drain order as well.
+        for (int i = 0; i < capacity * 10; i++) {
+            minMaxHeap.insert(new Int(random.nextInt()));
+        }
+        assertEquals(capacity * 10, minMaxHeap.getNumEntries());
+        assertGetMaxHeapIsSorted(minMaxHeap);
+    }
+
+    @Test
+    public void testReplaceMin() {
+        int capacity = 10;
+        MinMaxHeap minMaxHeap = new MinMaxHeap(new IntFactory(), capacity);
+        // Seed with 0 .. capacity-1, then replace the minimum `capacity`
+        // times with capacity .. 2*capacity-1, leaving only the upper range.
+        for (int i = 0; i < capacity; i++) {
+            minMaxHeap.insert(new Int(i));
+        }
+        assertEquals(capacity, minMaxHeap.getNumEntries());
+        assertFalse(minMaxHeap.isEmpty());
+
+        for (int i = capacity; i < capacity * 2; i++) {
+            minMaxHeap.replaceMin(new Int(i));
+        }
+        assertEquals(capacity, minMaxHeap.getNumEntries());
+        assertFalse(minMaxHeap.isEmpty());
+
+        // Drain from the min side: peekMin must agree with getMin and values
+        // must ascend from `capacity`.
+        Int minI = new Int();
+        Int peekI = new Int();
+        int i = 0;
+        while (!minMaxHeap.isEmpty()) {
+            minMaxHeap.peekMin(peekI);
+            minMaxHeap.getMin(minI);
+            assertTrue(peekI.compareTo(minI) == 0);
+            assertEquals(i++ + capacity, minI.i);
+        }
+    }
+
+    @Test
+    public void testReplaceMax() {
+        int capacity = 10;
+        MinMaxHeap minMaxHeap = new MinMaxHeap(new IntFactory(), capacity);
+        // Seed with capacity .. 2*capacity-1, then replace the maximum
+        // `capacity` times with 0 .. capacity-1, leaving only the lower range.
+        for (int i = 0; i < capacity; i++) {
+            minMaxHeap.insert(new Int(i + capacity));
+        }
+        assertEquals(capacity, minMaxHeap.getNumEntries());
+        assertFalse(minMaxHeap.isEmpty());
+
+        Int maxI = new Int();
+        for (int i = capacity; i < capacity * 2; i++) {
+            minMaxHeap.peekMax(maxI);
+            minMaxHeap.replaceMax(new Int(i - capacity));
+        }
+        assertEquals(capacity, minMaxHeap.getNumEntries());
+        assertFalse(minMaxHeap.isEmpty());
+
+        // NOTE(review): leftover debug statement -- prints nothing useful and
+        // should be removed.
+        System.out.println();
+        // Drain from the max side: peekMax must agree with getMax and values
+        // must descend from capacity-1 to 0.
+        Int peekI = new Int();
+        int i = 0;
+        while (!minMaxHeap.isEmpty()) {
+            minMaxHeap.peekMax(peekI);
+            minMaxHeap.getMax(maxI);
+            assertTrue(peekI.compareTo(maxI) == 0);
+            assertEquals(capacity - i - 1, maxI.i);
+            i++;
+        }
+    }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/util/MathTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/util/MathTest.java b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/util/MathTest.java
new file mode 100644
index 0000000..332ac98
--- /dev/null
+++ b/hyracks/hyracks-dataflow-std/src/test/java/edu/uci/ics/hyracks/dataflow/std/util/MathTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.std.util;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.Random;
+
+import org.junit.Test;
+
+// Unit test for MathUtil.log2Floor: for every bit position i in an int,
+// log2Floor must return i both for the exact power 2^i and for values
+// strictly between 2^i and 2^(i+1).
+public class MathTest {
+
+    @Test
+    public void testLog2() {
+        Random random = new Random(System.currentTimeMillis());
+        for (int i = 0; i < 31; i++) {
+            // Exact power of two: floor(log2(2^i)) == i.
+            assertTrue(MathUtil.log2Floor((int) Math.pow(2, i)) == i);
+            // Ten random fractional exponents in [0, 1): 2^(i+extra) stays
+            // below 2^(i+1), so the floor must still be i.
+            // NOTE(review): at i == 30 with extra near 1, Math.pow exceeds
+            // Integer.MAX_VALUE and the (int) cast saturates to 2^31 - 1;
+            // the expected floor is still 30, but this relies on narrowing
+            // conversion semantics -- confirm intentional.
+            for(int x = 0; x < 10; x++){
+                float extra = random.nextFloat();
+                // Defensive re-draw; nextFloat() is documented to return
+                // values in [0.0, 1.0), so this loop should not trigger.
+                while (extra >= 1.0){
+                    extra = random.nextFloat();
+                }
+                assertTrue(MathUtil.log2Floor((int) Math.pow(2, i + extra)) == i);
+            }
+        }
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
index ed4cf0c..c869362 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
@@ -16,10 +16,10 @@
 package edu.uci.ics.hyracks.examples.btree.helper;
 
 import java.io.DataOutput;
-import java.nio.ByteBuffer;
 import java.util.HashSet;
 import java.util.Random;
 
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
 import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
@@ -28,7 +28,6 @@ import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.job.IOperatorDescriptorRegistry;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
@@ -61,8 +60,7 @@ public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDes
     public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
 
-        final ByteBuffer outputFrame = ctx.allocateFrame();
-        final FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
         final RecordDescriptor recDesc = recordDescriptors[0];
         final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
         final Random rnd = new Random(randomSeed);
@@ -79,7 +77,6 @@ public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDes
             public void initialize() throws HyracksDataException {
                 writer.open();
                 try {
-                    appender.reset(outputFrame, true);
                     for (int i = 0; i < numRecords; i++) {
                         tb.reset();
                         for (int j = 0; j < recDesc.getFieldCount(); j++) {
@@ -87,14 +84,13 @@ public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDes
                         }
 
                         if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            FrameUtils.flushFrame(outputFrame, writer);
-                            appender.reset(outputFrame, true);
+                            appender.flush(writer, true);
                             if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
                                 throw new HyracksDataException("Record size (" + tb.getSize() + ") larger than frame size (" + appender.getBuffer().capacity() + ")");
                             }
                         }
                     }
-                    FrameUtils.flushFrame(outputFrame, writer);
+                    appender.flush(writer, true);
                 } catch (Exception e) {
                     writer.fail();
                     throw new HyracksDataException(e);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/.gitignore
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/.gitignore b/hyracks/hyracks-examples/hyracks-integration-tests/.gitignore
new file mode 100644
index 0000000..be303ea
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/.gitignore
@@ -0,0 +1,3 @@
+primary*/
+secondary*/
+inv*/

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
index 29be04b..5268150 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/comm/SerializationDeserializationTest.java
@@ -24,8 +24,10 @@ import java.util.logging.Logger;
 
 import org.junit.Test;
 
+import edu.uci.ics.hyracks.api.comm.IFrame;
 import edu.uci.ics.hyracks.api.comm.IFrameReader;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.IDataWriter;
 import edu.uci.ics.hyracks.api.dataflow.IOpenableDataReader;
@@ -48,12 +50,12 @@ public class SerializationDeserializationTest {
         private final IHyracksTaskContext ctx;
         private static final int FRAME_SIZE = 32768;
         private RecordDescriptor rDes;
-        private List<ByteBuffer> buffers;
+        private List<IFrame> buffers;
 
         public SerDeserRunner(RecordDescriptor rDes) throws HyracksException {
             ctx = TestUtils.create(FRAME_SIZE);
             this.rDes = rDes;
-            buffers = new ArrayList<ByteBuffer>();
+            buffers = new ArrayList<>();
         }
 
         public IOpenableDataWriter<Object[]> createWriter() throws HyracksDataException {
@@ -64,8 +66,8 @@ public class SerializationDeserializationTest {
 
                 @Override
                 public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-                    ByteBuffer toBuf = ctx.allocateFrame();
-                    toBuf.put(buffer);
+                    IFrame toBuf = new VSizeFrame(ctx);
+                    toBuf.getBuffer().put(buffer);
                     buffers.add(toBuf);
                 }
 
@@ -89,12 +91,12 @@ public class SerializationDeserializationTest {
                 }
 
                 @Override
-                public boolean nextFrame(ByteBuffer buffer) throws HyracksDataException {
+                public boolean nextFrame(IFrame frame) throws HyracksDataException {
                     if (i < buffers.size()) {
-                        ByteBuffer buf = buffers.get(i);
-                        buf.flip();
-                        buffer.put(buf);
-                        buffer.flip();
+                        IFrame buf = buffers.get(i);
+                        buf.getBuffer().flip();
+                        frame.getBuffer().put(buf.getBuffer());
+                        frame.getBuffer().flip();
                         ++i;
                         return true;
                     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
index 1150cf3..30f89e2 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -15,10 +15,11 @@
 package edu.uci.ics.hyracks.tests.integration;
 
 import java.io.BufferedReader;
+import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileReader;
+import java.io.FileWriter;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
@@ -34,6 +35,7 @@ import org.junit.rules.TemporaryFolder;
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
 import edu.uci.ics.hyracks.api.dataset.ResultSetId;
@@ -45,6 +47,7 @@ import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
 import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
 import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
 
@@ -138,12 +141,11 @@ public abstract class AbstractIntegrationTest {
         hcc.waitForCompletion(jobId);
     }
 
+
     protected List<String> readResults(JobSpecification spec, JobId jobId, ResultSetId resultSetId) throws Exception {
         int nReaders = 1;
-        ByteBuffer resultBuffer = ByteBuffer.allocate(spec.getFrameSize());
-        resultBuffer.clear();
 
-        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor(spec.getFrameSize());
+        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
 
         IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
         IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, resultSetId);
@@ -151,16 +153,18 @@ public abstract class AbstractIntegrationTest {
         List<String> resultRecords = new ArrayList<String>();
         ByteBufferInputStream bbis = new ByteBufferInputStream();
 
-        int readSize = reader.read(resultBuffer);
+        FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
+        VSizeFrame frame = new VSizeFrame(resultDisplayFrameMgr);
+        int readSize = reader.read(frame);
 
         while (readSize > 0) {
 
             try {
-                frameTupleAccessor.reset(resultBuffer);
+                frameTupleAccessor.reset(frame.getBuffer());
                 for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                     int start = frameTupleAccessor.getTupleStartOffset(tIndex);
                     int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
-                    bbis.setByteBuffer(resultBuffer, start);
+                    bbis.setByteBuffer(frame.getBuffer(), start);
                     byte[] recordBytes = new byte[length];
                     bbis.read(recordBytes, 0, length);
                     resultRecords.add(new String(recordBytes, 0, length));
@@ -169,8 +173,7 @@ public abstract class AbstractIntegrationTest {
                 bbis.close();
             }
 
-            resultBuffer.clear();
-            readSize = reader.read(resultBuffer);
+            readSize = reader.read(frame);
         }
         return resultRecords;
     }
@@ -198,6 +201,22 @@ public abstract class AbstractIntegrationTest {
         return true;
     }
 
+    protected void runTestAndStoreResult(JobSpecification spec, File file) throws Exception {
+        JobId jobId = executeTest(spec);
+
+        BufferedWriter output = new BufferedWriter(new FileWriter(file));
+        List<String> results;
+        for (int i = 0; i < spec.getResultSetIds().size(); i++) {
+            results = readResults(spec, jobId, spec.getResultSetIds().get(i));
+            for(String str : results) {
+                output.write(str);
+            }
+        }
+        output.close();
+
+        hcc.waitForCompletion(jobId);
+    }
+
     protected File createTempFile() throws IOException {
         File tempFile = File.createTempFile(getClass().getName(), ".tmp", outputFolder.getRoot());
         if (LOGGER.isLoggable(Level.INFO)) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 970f2fe..602e193 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -16,7 +16,6 @@ package edu.uci.ics.hyracks.tests.integration;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.EnumSet;
 import java.util.List;
@@ -33,6 +32,7 @@ import org.junit.rules.TemporaryFolder;
 import edu.uci.ics.hyracks.api.client.HyracksConnection;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDataset;
 import edu.uci.ics.hyracks.api.dataset.IHyracksDatasetReader;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
@@ -44,6 +44,7 @@ import edu.uci.ics.hyracks.control.cc.ClusterControllerService;
 import edu.uci.ics.hyracks.control.common.controllers.CCConfig;
 import edu.uci.ics.hyracks.control.common.controllers.NCConfig;
 import edu.uci.ics.hyracks.control.nc.NodeControllerService;
+import edu.uci.ics.hyracks.control.nc.resources.memory.FrameManager;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ResultFrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
 
@@ -66,7 +67,8 @@ public abstract class AbstractMultiNCIntegrationTest {
     public TemporaryFolder outputFolder = new TemporaryFolder();
 
     public AbstractMultiNCIntegrationTest() {
-        outputFiles = new ArrayList<File>();;
+        outputFiles = new ArrayList<File>();
+        ;
     }
 
     @BeforeClass
@@ -124,10 +126,10 @@ public abstract class AbstractMultiNCIntegrationTest {
 
         int nReaders = 1;
 
-        ByteBuffer resultBuffer = ByteBuffer.allocate(spec.getFrameSize());
-        resultBuffer.clear();
+        FrameManager resultDisplayFrameMgr = new FrameManager(spec.getFrameSize());
+        VSizeFrame resultFrame = new VSizeFrame(resultDisplayFrameMgr);
 
-        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor(spec.getFrameSize());
+        IFrameTupleAccessor frameTupleAccessor = new ResultFrameTupleAccessor();
 
         IHyracksDataset hyracksDataset = new HyracksDataset(hcc, spec.getFrameSize(), nReaders);
         IHyracksDatasetReader reader = hyracksDataset.createReader(jobId, spec.getResultSetIds().get(0));
@@ -135,16 +137,16 @@ public abstract class AbstractMultiNCIntegrationTest {
         JSONArray resultRecords = new JSONArray();
         ByteBufferInputStream bbis = new ByteBufferInputStream();
 
-        int readSize = reader.read(resultBuffer);
+        int readSize = reader.read(resultFrame);
 
         while (readSize > 0) {
 
             try {
-                frameTupleAccessor.reset(resultBuffer);
+                frameTupleAccessor.reset(resultFrame.getBuffer());
                 for (int tIndex = 0; tIndex < frameTupleAccessor.getTupleCount(); tIndex++) {
                     int start = frameTupleAccessor.getTupleStartOffset(tIndex);
                     int length = frameTupleAccessor.getTupleEndOffset(tIndex) - start;
-                    bbis.setByteBuffer(resultBuffer, start);
+                    bbis.setByteBuffer(resultFrame.getBuffer(), start);
                     byte[] recordBytes = new byte[length];
                     bbis.read(recordBytes, 0, length);
                     resultRecords.put(new String(recordBytes, 0, length));
@@ -157,8 +159,7 @@ public abstract class AbstractMultiNCIntegrationTest {
                 }
             }
 
-            resultBuffer.clear();
-            readSize = reader.read(resultBuffer);
+            readSize = reader.read(resultFrame);
         }
 
         hcc.waitForCompletion(jobId);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
index a2ef99a..9b77ec5 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/OptimizedSortMergeTest.java
@@ -42,9 +42,10 @@ import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.LimitOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.OptimizedExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.TopKSorterOperatorDescriptor;
 import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class OptimizedSortMergeTest extends AbstractIntegrationTest {
@@ -72,17 +73,22 @@ public class OptimizedSortMergeTest extends AbstractIntegrationTest {
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
 
-        OptimizedExternalSortOperatorDescriptor sorter = new OptimizedExternalSortOperatorDescriptor(spec, 4,
-                new int[] { 1, 0 }, new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+        int outputLimit = 5; // larger than the total record numbers.
+        TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4,
+                outputLimit, new int[] { 1, 0 }, null, new IBinaryComparatorFactory[] {
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
         ResultSetId rsId = new ResultSetId(1);
         spec.addResultSetId(rsId);
 
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, false, false,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        File file = File.createTempFile(getClass().getName(), ".tmp");
+        IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
+                new FileSplit[] { new FileSplit(NC1_ID, file.getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
+
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -98,6 +104,7 @@ public class OptimizedSortMergeTest extends AbstractIntegrationTest {
                         new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
 
         runTest(spec);
+        System.out.println("Result write into :" + file.getAbsolutePath());
     }
 
     @Test
@@ -123,11 +130,11 @@ public class OptimizedSortMergeTest extends AbstractIntegrationTest {
                         UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
 
-        int outputLimit = 200;
-        OptimizedExternalSortOperatorDescriptor sorter = new OptimizedExternalSortOperatorDescriptor(spec, 4,
-                outputLimit, new int[] { 1, 0 }, new IBinaryComparatorFactory[] {
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+        int outputLimit = 20;
+        TopKSorterOperatorDescriptor sorter = new TopKSorterOperatorDescriptor(spec, 4,
+                outputLimit, new int[] { 1, 0 }, null, new IBinaryComparatorFactory[] {
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
 
         LimitOperatorDescriptor filter = new LimitOperatorDescriptor(spec, ordersDesc, outputLimit);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/VSizeFrameSortMergeTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/VSizeFrameSortMergeTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/VSizeFrameSortMergeTest.java
new file mode 100644
index 0000000..212a99c
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/VSizeFrameSortMergeTest.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.integration;
+
+import java.io.File;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.normalizers.UTF8StringNormalizedKeyComputerFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNPartitioningMergingConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+
+public class VSizeFrameSortMergeTest extends AbstractIntegrationTest {
+
+    public static String[] INPUTS = { "data/tpch0.001/orders-part1.tbl", "data/tpch0.001/orders-part2.tbl" };
+
+    FileSplit[] ordersSplits = new FileSplit[] {
+            new FileSplit(NC1_ID, new FileReference(new File(INPUTS[0]))),
+            new FileSplit(NC2_ID, new FileReference(new File(INPUTS[1]))) };
+    IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+    RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE });
+
+    @Test
+    public void sortNormalMergeTest() throws Exception {
+        sortTask(1024, 4);
+        sortTask(256, 4);
+    }
+
+    @Test
+    public void sortLargeMergeTest() throws Exception {
+        sortTask(32, 128);
+        sortTask(16, 256);
+        sortTask(16, 10240);
+    }
+
+    public void sortTask(int frameSize, int frameLimit) throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID, NC2_ID);
+        //                PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID );
+
+        spec.setFrameSize(frameSize);
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, frameLimit, new int[] { 1, 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID, NC2_ID);
+
+        File file = File.createTempFile(getClass().getName(), ".tmp");
+
+        IFileSplitProvider outputSplitProvider = new ConstantFileSplitProvider(
+                new FileSplit[] { new FileSplit(NC1_ID, file.getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outputSplitProvider, "|");
+
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(
+                new MToNPartitioningMergingConnectorDescriptor(spec, new FieldHashPartitionComputerFactory(new int[] {
+                        1, 0 }, new IBinaryHashFunctionFactory[] {
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryHashFunctionFactory.of(UTF8StringPointable.FACTORY) }), new int[] { 1, 0 },
+                        new IBinaryComparatorFactory[] {
+                                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                        new UTF8StringNormalizedKeyComputerFactory()), sorter, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+        System.out.println("Result write into :" + file.getAbsolutePath());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/AbstractRunGeneratorTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/AbstractRunGeneratorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/AbstractRunGeneratorTest.java
new file mode 100644
index 0000000..c4faa1a
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/AbstractRunGeneratorTest.java
@@ -0,0 +1,279 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import static org.junit.Assert.assertTrue;
+
+import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrame;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.RunAndMaxFrameSizePair;
+import edu.uci.ics.hyracks.dataflow.std.sort.util.GroupFrameAccessor;
+import edu.uci.ics.hyracks.dataflow.std.sort.util.GroupVSizeFrame;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractRunGeneratorTest {
+    static TestUtils testUtils = new TestUtils();
+    static ISerializerDeserializer[] SerDers = new ISerializerDeserializer[] {
+            IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+    static RecordDescriptor RecordDesc = new RecordDescriptor(SerDers);
+    static Random GRandom = new Random(System.currentTimeMillis());
+    static int[] SortFields = new int[] { 0, 1 };
+    static IBinaryComparatorFactory[] ComparatorFactories = new IBinaryComparatorFactory[] {
+            PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY),
+            PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) };
+
+    static void assertMaxFrameSizesAreAllEqualsTo(List<RunAndMaxFrameSizePair> maxSize, int pageSize) {
+        for (int i = 0; i < maxSize.size(); i++) {
+            assertTrue(maxSize.get(i).maxFrameSize == pageSize);
+        }
+    }
+
+    abstract AbstractSortRunGenerator getSortRunGenerator(IHyracksTaskContext ctx, int frameLimit, int numOfInputRecord)
+            throws HyracksDataException;
+
+    protected List<RunAndMaxFrameSizePair> testSortRecords(int pageSize, int frameLimit, int numRuns, int minRecordSize,
+            int maxRecordSize, HashMap<Integer, String> specialData) throws HyracksDataException {
+        IHyracksTaskContext ctx = testUtils.create(pageSize);
+
+        HashMap<Integer, String> keyValuePair = new HashMap<>();
+        List<IFrame> frameList = new ArrayList<>();
+        prepareData(ctx, frameList, pageSize * frameLimit * numRuns, minRecordSize, maxRecordSize,
+                specialData, keyValuePair);
+        AbstractSortRunGenerator runGenerator = getSortRunGenerator(ctx, frameLimit, keyValuePair.size());
+        runGenerator.open();
+        for (IFrame frame : frameList) {
+            runGenerator.nextFrame(frame.getBuffer());
+        }
+        runGenerator.close();
+        matchResult(ctx, runGenerator.getRuns(), keyValuePair);
+        return runGenerator.getRuns();
+    }
+
+    static void matchResult(IHyracksTaskContext ctx, List<RunAndMaxFrameSizePair> runs,
+            Map<Integer, String> keyValuePair) throws HyracksDataException {
+        IFrame frame = new VSizeFrame(ctx);
+        FrameTupleAccessor fta = new FrameTupleAccessor(RecordDesc);
+
+        HashMap<Integer, String> copyMap = new HashMap<>(keyValuePair);
+        assertReadSorted(runs, fta, frame, copyMap);
+
+        HashMap<Integer, String> copyMap2 = new HashMap<>(keyValuePair);
+        int maxFrameSizes = 0;
+        for (RunAndMaxFrameSizePair run : runs) {
+            maxFrameSizes = Math.max(maxFrameSizes, run.maxFrameSize);
+        }
+        GroupVSizeFrame gframe = new GroupVSizeFrame(ctx, maxFrameSizes);
+        GroupFrameAccessor gfta = new GroupFrameAccessor(ctx.getInitialFrameSize(), RecordDesc);
+        assertReadSorted(runs, gfta, gframe, copyMap2);
+    }
+
+    static int assertFTADataIsSorted(IFrameTupleAccessor fta, Map<Integer, String> keyValuePair, int preKey)
+            throws HyracksDataException {
+
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+        DataInputStream di = new DataInputStream(bbis);
+        for (int i = 0; i < fta.getTupleCount(); i++) {
+            bbis.setByteBuffer(fta.getBuffer(),
+                    fta.getTupleStartOffset(i) + fta.getFieldStartOffset(i, 0) + fta.getFieldSlotsLength());
+            int key = (int) RecordDesc.getFields()[0].deserialize(di);
+            bbis.setByteBuffer(fta.getBuffer(),
+                    fta.getTupleStartOffset(i) + fta.getFieldStartOffset(i, 1) + fta.getFieldSlotsLength());
+            String value = (String) RecordDesc.getFields()[1].deserialize(di);
+
+            if (!keyValuePair.get(key).equals(value)) {
+                assertTrue(false);
+            }
+            keyValuePair.remove(key);
+            assertTrue(key >= preKey);
+            preKey = key;
+        }
+        return preKey;
+    }
+
+    static void assertReadSorted(List<RunAndMaxFrameSizePair> runs, IFrameTupleAccessor fta, IFrame frame,
+            Map<Integer, String> keyValuePair) throws HyracksDataException {
+
+        assertTrue(runs.size() > 0);
+        for (RunAndMaxFrameSizePair run : runs) {
+            run.run.open();
+            int preKey = Integer.MIN_VALUE;
+            while (run.run.nextFrame(frame)) {
+                fta.reset(frame.getBuffer());
+                preKey = assertFTADataIsSorted(fta, keyValuePair, preKey);
+            }
+            run.run.close();
+        }
+        assertTrue(keyValuePair.isEmpty());
+    }
+
+    static void prepareData(IHyracksTaskContext ctx, List<IFrame> frameList, int minDataSize, int minRecordSize,
+            int maxRecordSize, Map<Integer, String> specialData, Map<Integer, String> keyValuePair)
+            throws HyracksDataException {
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(RecordDesc.getFieldCount());
+        FrameTupleAppender appender = new FrameTupleAppender();
+
+        int datasize = 0;
+        if (specialData != null) {
+            for (Map.Entry<Integer, String> entry : specialData.entrySet()) {
+                tb.reset();
+                tb.addField(IntegerSerializerDeserializer.INSTANCE, entry.getKey());
+                tb.addField(UTF8StringSerializerDeserializer.INSTANCE, entry.getValue());
+
+                VSizeFrame frame = new VSizeFrame(ctx, FrameHelper
+                        .calcAlignedFrameSizeToStore(tb.getFieldEndOffsets().length, tb.getSize(), ctx.getInitialFrameSize()));
+                appender.reset(frame, true);
+                assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
+                frameList.add(frame);
+                datasize += frame.getFrameSize();
+            }
+            keyValuePair.putAll(specialData);
+        }
+
+        VSizeFrame frame = new VSizeFrame(ctx, ctx.getInitialFrameSize());
+        appender.reset(frame, true);
+        while (datasize < minDataSize) {
+            tb.reset();
+            int key = GRandom.nextInt(minDataSize + 1);
+            if (!keyValuePair.containsKey(key)) {
+                String value = generateRandomRecord(minRecordSize, maxRecordSize);
+                tb.addField(IntegerSerializerDeserializer.INSTANCE, key);
+                tb.addField(UTF8StringSerializerDeserializer.INSTANCE, value);
+
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    frameList.add(frame);
+                    datasize += frame.getFrameSize();
+                    frame = new VSizeFrame(ctx, FrameHelper
+                            .calcAlignedFrameSizeToStore(tb.getFieldEndOffsets().length, tb.getSize(),
+                                    ctx.getInitialFrameSize()));
+                    appender.reset(frame, true);
+                    assertTrue(appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize()));
+                }
+
+                keyValuePair.put(key, value);
+            }
+        }
+        if (appender.getTupleCount() > 0) {
+            frameList.add(frame);
+        }
+
+    }
+
+    static String generateRandomRecord(int minRecordSize, int maxRecordSize)
+            throws HyracksDataException {
+        int size = GRandom.nextInt(maxRecordSize - minRecordSize + 1) + minRecordSize;
+        return generateRandomFixSizedString(size);
+
+    }
+
+    static String generateRandomFixSizedString(int size) {
+        StringBuilder sb = new StringBuilder(size);
+        for (; size >= 0; --size) {
+            char ch = (char) (GRandom.nextInt(26) + 97);
+            sb.append(ch);
+        }
+        return sb.toString();
+    }
+
+    static HashMap<Integer, String> generateBigObject(int pageSize, int times) {
+        HashMap<Integer, String> map = new HashMap<>(1);
+        for (int i = 1; i < times; i++) {
+            map.put(GRandom.nextInt(), generateRandomFixSizedString(pageSize * i));
+        }
+        return map;
+    }
+
+    @Test
+    public void testAllSmallRecords() throws HyracksDataException {
+        int pageSize = 512;
+        int frameLimit = 4;
+        int numRuns = 2;
+        int minRecordSize = pageSize / 8;
+        int maxRecordSize = pageSize / 8;
+        List<RunAndMaxFrameSizePair> maxSize = testSortRecords(pageSize, frameLimit, numRuns, minRecordSize,
+                maxRecordSize, null);
+        assertMaxFrameSizesAreAllEqualsTo(maxSize, pageSize);
+    }
+
+    @Test
+    public void testAllLargeRecords() throws HyracksDataException {
+        int pageSize = 2048;
+        int frameLimit = 4;
+        int numRuns = 2;
+        int minRecordSize = pageSize;
+        int maxRecordSize = (int) (pageSize * 1.8);
+        List<RunAndMaxFrameSizePair> size = testSortRecords(pageSize, frameLimit, numRuns, minRecordSize, maxRecordSize,
+                null);
+        assertMaxFrameSizesAreAllEqualsTo(size, pageSize * 2);
+    }
+
+    @Test
+    public void testMixedLargeRecords() throws HyracksDataException {
+        int pageSize = 128;
+        int frameLimit = 4;
+        int numRuns = 4;
+        int minRecordSize = 20;
+        int maxRecordSize = pageSize / 2;
+        HashMap<Integer, String> specialPair = generateBigObject(pageSize, frameLimit - 1);
+        List<RunAndMaxFrameSizePair> size = testSortRecords(pageSize, frameLimit, numRuns, minRecordSize, maxRecordSize,
+                specialPair);
+
+        int max = 0;
+        for (RunAndMaxFrameSizePair run : size) {
+            max = Math.max(max, run.maxFrameSize);
+        }
+        assertTrue(max == pageSize * (frameLimit - 1));
+    }
+
+    @Test(expected = HyracksDataException.class)
+    public void testTooBigRecordWillThrowException() throws HyracksDataException {
+        int pageSize = 1024;
+        int frameLimit = 8;
+        int numRuns = 8;
+        HashMap<Integer, String> specialPair = generateBigObject(pageSize, frameLimit);
+        int minRecordSize = 10;
+        int maxRecordSize = pageSize / 2;
+        List<RunAndMaxFrameSizePair> size = testSortRecords(pageSize, frameLimit, numRuns, minRecordSize, maxRecordSize,
+                specialPair);
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/ExternalSortRunGeneratorTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/ExternalSortRunGeneratorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/ExternalSortRunGeneratorTest.java
new file mode 100644
index 0000000..4d7558b
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/ExternalSortRunGeneratorTest.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.Algorithm;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
+
+public class ExternalSortRunGeneratorTest extends AbstractRunGeneratorTest {
+
+    @Override
+    AbstractSortRunGenerator getSortRunGenerator(IHyracksTaskContext ctx, int frameLimit, int numOfInputRecord)
+            throws HyracksDataException {
+        return new ExternalSortRunGenerator(ctx, SortFields, null, ComparatorFactories, RecordDesc,
+                Algorithm.MERGE_SORT, frameLimit);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HeapSortRunGeneratorTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HeapSortRunGeneratorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HeapSortRunGeneratorTest.java
new file mode 100644
index 0000000..00eca70
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HeapSortRunGeneratorTest.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.HeapSortRunGenerator;
+
+public class HeapSortRunGeneratorTest extends AbstractRunGeneratorTest {
+    @Override
+    AbstractSortRunGenerator getSortRunGenerator(IHyracksTaskContext ctx, int frameLimit, int numOfInputRecord)
+            throws HyracksDataException {
+        return new HeapSortRunGenerator(ctx, frameLimit, numOfInputRecord, SortFields, null, ComparatorFactories,
+                RecordDesc);
+    }
+
+    @Test
+    public void testTopK(){
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HybridSortRunGenerator.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HybridSortRunGenerator.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HybridSortRunGenerator.java
new file mode 100644
index 0000000..f7ecd5e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/HybridSortRunGenerator.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.HybridTopKSortRunGenerator;
+
+public class HybridSortRunGenerator extends AbstractRunGeneratorTest {
+    @Override
+    AbstractSortRunGenerator getSortRunGenerator(IHyracksTaskContext ctx, int frameLimit, int numOfInputRecord)
+            throws HyracksDataException {
+        return new HybridTopKSortRunGenerator(ctx, frameLimit, numOfInputRecord, SortFields, null, ComparatorFactories,
+                RecordDesc);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/RunMergingFrameReaderTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/RunMergingFrameReaderTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/RunMergingFrameReaderTest.java
new file mode 100644
index 0000000..d5355b8
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/RunMergingFrameReaderTest.java
@@ -0,0 +1,409 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.ComparatorFactories;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.GRandom;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.RecordDesc;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.SortFields;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.generateRandomRecord;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.matchResult;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.prepareData;
+import static edu.uci.ics.hyracks.tests.unit.ExternalSortRunGeneratorTest.testUtils;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.DataInputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrame;
+import edu.uci.ics.hyracks.api.comm.IFrameReader;
+import edu.uci.ics.hyracks.api.comm.VSizeFrame;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.ByteBufferInputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.sort.Algorithm;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.RunAndMaxFrameSizePair;
+import edu.uci.ics.hyracks.dataflow.std.sort.RunMergingFrameReader;
+import edu.uci.ics.hyracks.dataflow.std.sort.util.GroupVSizeFrame;
+
+/**
+ * Unit tests for {@link RunMergingFrameReader}: merging one or more sorted runs
+ * (possibly containing records larger than one page) must produce a single
+ * key-ordered stream in which every generated record appears exactly once.
+ */
+public class RunMergingFrameReaderTest {
+    // Comparators paired with SortFields; shared by every merge in this class.
+    static IBinaryComparator[] Comparators = new IBinaryComparator[] {
+            ComparatorFactories[0].createBinaryComparator(),
+            ComparatorFactories[1].createBinaryComparator(),
+    };
+
+    /**
+     * An in-memory "run": {@link #open()} generates random (int key, String value)
+     * records sorted by key, and {@link #nextFrame(IFrame)} serves them one frame
+     * at a time, carrying over a record that did not fit into the previous frame.
+     */
+    static class TestFrameReader implements IFrameReader {
+
+        private final int pageSize;
+        private final int numFrames;
+        private final int minRecordSize;
+        private final int maxRecordSize;
+        // TreeMap keeps the generated records in key order, i.e. run order.
+        private TreeMap<Integer, String> result = new TreeMap<>();
+        int maxFrameSize;
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(RecordDesc.getFieldCount());
+        FrameTupleAppender appender = new FrameTupleAppender();
+        private Iterator<Map.Entry<Integer, String>> iterator;
+        // Record left over from the previous nextFrame() call, or null.
+        private Map.Entry<Integer, String> lastEntry;
+
+        TestFrameReader(int pageSize, int numFrames, int minRecordSize, int maxRecordSize) {
+            this.pageSize = pageSize;
+            this.numFrames = numFrames;
+            this.minRecordSize = minRecordSize;
+            this.maxRecordSize = maxRecordSize;
+            // Until open() measures the real max tuple, report one page.
+            this.maxFrameSize = pageSize;
+        }
+
+        @Override
+        public void open() throws HyracksDataException {
+            result.clear();
+            int maxTupleSize = prepareSortedData(numFrames * pageSize, minRecordSize, maxRecordSize, null, result);
+            // Recompute the frame size needed to hold the largest generated tuple.
+            maxFrameSize = FrameHelper.calcAlignedFrameSizeToStore(0, maxTupleSize, pageSize);
+            iterator = result.entrySet().iterator();
+        }
+
+        @Override
+        public boolean nextFrame(IFrame frame) throws HyracksDataException {
+            if (lastEntry == null && !iterator.hasNext()) {
+                return false;
+            }
+            if (lastEntry == null) {
+                lastEntry = iterator.next();
+            }
+            appender.reset(frame, true);
+            while (true) {
+                tb.reset();
+                tb.addField(IntegerSerializerDeserializer.INSTANCE, lastEntry.getKey());
+                tb.addField(UTF8StringSerializerDeserializer.INSTANCE, lastEntry.getValue());
+                // Append until the frame is full; the record that failed to fit is
+                // kept in lastEntry and re-tried on the next call.
+                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+                    break;
+                } else {
+                    if (iterator.hasNext()) {
+                        lastEntry = iterator.next();
+                    } else {
+                        lastEntry = null;
+                        break;
+                    }
+
+                }
+            }
+            //            printFrame(frame.getBuffer());
+            return true;
+        }
+
+        // Debugging helper; referenced only by the commented-out call above.
+        private void printFrame(ByteBuffer buffer) {
+            FrameTupleAccessor fta = new FrameTupleAccessor(RecordDesc);
+            fta.reset(buffer);
+            fta.prettyPrint();
+        }
+
+        @Override
+        public void close() throws HyracksDataException {
+        }
+    }
+
+    /**
+     * Fills {@code result} with random records until at least {@code minDataSize}
+     * bytes of tuple data have been generated, optionally seeding it with
+     * {@code specialData}. Returns the size in bytes of the largest tuple built.
+     * Note: a record whose size pushes datasize past minDataSize is counted but
+     * not stored, so the stored data stays just under the requested size.
+     */
+    static int prepareSortedData(int minDataSize, int minRecordSize, int maxRecordSize,
+            Map<Integer, String> specialData, Map<Integer, String> result) throws HyracksDataException {
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(RecordDesc.getFieldCount());
+
+        int datasize = 0;
+        int maxtuple = 0;
+        if (specialData != null) {
+            for (Map.Entry<Integer, String> entry : specialData.entrySet()) {
+                tb.reset();
+                tb.addField(IntegerSerializerDeserializer.INSTANCE, entry.getKey());
+                tb.addField(UTF8StringSerializerDeserializer.INSTANCE, entry.getValue());
+                // Tuple size = field data + 4-byte end-offset slot per field.
+                int size = tb.getSize() + tb.getFieldEndOffsets().length * 4;
+                datasize += size;
+                if (size > maxtuple) {
+                    maxtuple = size;
+                }
+            }
+            result.putAll(specialData);
+        }
+
+        while (datasize < minDataSize) {
+            String value = generateRandomRecord(minRecordSize, maxRecordSize);
+            tb.reset();
+            // Keys are drawn from [0, datasize]; duplicates are skipped so each
+            // key maps to exactly one value.
+            int key = GRandom.nextInt(datasize + 1);
+            if (!result.containsKey(key)) {
+                tb.addField(IntegerSerializerDeserializer.INSTANCE, key);
+                tb.addField(UTF8StringSerializerDeserializer.INSTANCE, value);
+                int size = tb.getSize() + tb.getFieldEndOffsets().length * 4;
+                datasize += size;
+                if (size > maxtuple) {
+                    maxtuple = size;
+                }
+                if (datasize < minDataSize) {
+                    result.put(key, value);
+                }
+            }
+        }
+
+        return maxtuple;
+    }
+
+    /** A merge over a single run must simply pass the run through, sorted. */
+    @Test
+    public void testOnlyOneRunShouldMerge() throws HyracksDataException {
+        int pageSize = 128;
+        int numRuns = 1;
+        int numFramesPerRun = 1;
+        int minRecordSize = pageSize / 10;
+        int maxRecordSize = pageSize / 8;
+
+        IHyracksTaskContext ctx = testUtils.create(pageSize);
+        List<Map<Integer, String>> keyValueMapList = new ArrayList<>(numRuns);
+        List<TestFrameReader> readerList = new ArrayList<>(numRuns);
+        List<IFrame> frameList = new ArrayList<>(numRuns);
+        prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                minRecordSize, maxRecordSize, readerList, frameList, keyValueMapList);
+
+        RunMergingFrameReader reader = new RunMergingFrameReader(ctx, readerList, frameList, SortFields, Comparators,
+                null, RecordDesc);
+        testMergeSucceed(ctx, reader, keyValueMapList);
+    }
+
+    /** Standard case: several multi-frame runs of small records. */
+    @Test
+    public void testNormalRunMerge() throws HyracksDataException {
+
+        int pageSize = 128;
+        int numRuns = 2;
+        int numFramesPerRun = 2;
+        int minRecordSize = pageSize / 10;
+        int maxRecordSize = pageSize / 8;
+
+        IHyracksTaskContext ctx = testUtils.create(pageSize);
+        List<Map<Integer, String>> keyValueMapList = new ArrayList<>(numRuns);
+        List<TestFrameReader> readerList = new ArrayList<>(numRuns);
+        List<IFrame> frameList = new ArrayList<>(numRuns);
+        prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                minRecordSize, maxRecordSize, readerList, frameList, keyValueMapList);
+
+        RunMergingFrameReader reader = new RunMergingFrameReader(ctx, readerList, frameList, SortFields, Comparators,
+                null, RecordDesc);
+        testMergeSucceed(ctx, reader, keyValueMapList);
+    }
+
+    /**
+     * With a top-K bound the merger must emit exactly K records: the total
+     * record count minus what remains unconsumed must equal topK.
+     */
+    @Test
+    public void testNormalRunMergeWithTopK() throws HyracksDataException {
+
+        int pageSize = 128;
+        int numRuns = 2;
+        int numFramesPerRun = 2;
+        int minRecordSize = pageSize / 10;
+        int maxRecordSize = pageSize / 8;
+
+        // Upper bound keeps topK safely below the number of generated records.
+        for (int topK = 1; topK < pageSize * numRuns * numFramesPerRun / maxRecordSize / 2; topK++) {
+            IHyracksTaskContext ctx = testUtils.create(pageSize);
+            List<Map<Integer, String>> keyValueMapList = new ArrayList<>(numRuns);
+            List<TestFrameReader> readerList = new ArrayList<>(numRuns);
+            List<IFrame> frameList = new ArrayList<>(numRuns);
+            prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                    minRecordSize, maxRecordSize, readerList, frameList, keyValueMapList);
+
+            RunMergingFrameReader reader = new RunMergingFrameReader(ctx, readerList, frameList, SortFields,
+                    Comparators,
+                    null, RecordDesc, topK);
+            int totalCount = testMergeSucceedInner(ctx, reader, keyValueMapList);
+            // Records still in the maps were (correctly) never emitted by the merger.
+            int newCount = 0;
+            for (Map<Integer, String> x : keyValueMapList) {
+                newCount += x.size();
+            }
+            assertEquals(topK + newCount, totalCount);
+        }
+    }
+
+    /** Runs the merge and additionally requires every record to be consumed. */
+    private void testMergeSucceed(IHyracksTaskContext ctx, RunMergingFrameReader reader,
+            List<Map<Integer, String>> keyValueMapList) throws HyracksDataException {
+
+        testMergeSucceedInner(ctx, reader, keyValueMapList);
+        assertAllKeyValueIsConsumed(keyValueMapList);
+        reader.close();
+    }
+
+    /**
+     * Drains the merger, checking each output frame is sorted and drawn from the
+     * expected data; returns the total number of records generated beforehand.
+     * Consumed records are removed from keyValueMapList as a side effect.
+     */
+    private int testMergeSucceedInner(IHyracksTaskContext ctx, RunMergingFrameReader reader,
+            List<Map<Integer, String>> keyValueMapList) throws HyracksDataException {
+
+        IFrame frame = new VSizeFrame(ctx);
+        reader.open();
+        int count = 0;
+        for (int i = 0; i < keyValueMapList.size(); i++) {
+            // Copy to a TreeMap so removals below don't mutate the reader's data.
+            keyValueMapList.set(i, new TreeMap<>(keyValueMapList.get(i)));
+            count += keyValueMapList.get(i).size();
+        }
+        while (reader.nextFrame(frame)) {
+            assertFrameIsSorted(frame, keyValueMapList);
+        }
+        return count;
+    }
+
+    /** Mixes small-record runs with runs whose records exceed one page. */
+    @Test
+    public void testOneLargeRunMerge() throws HyracksDataException {
+        int pageSize = 64;
+        int numRuns = 2;
+        int numFramesPerRun = 1;
+        int minRecordSize = pageSize / 10;
+        int maxRecordSize = pageSize / 8;
+
+        IHyracksTaskContext ctx = testUtils.create(pageSize);
+        List<Map<Integer, String>> keyValueMap = new ArrayList<>();
+        List<TestFrameReader> readerList = new ArrayList<>();
+        List<IFrame> frameList = new ArrayList<>();
+        prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                minRecordSize, maxRecordSize, readerList, frameList, keyValueMap);
+
+        // Second batch: records exactly one page long.
+        minRecordSize = pageSize;
+        maxRecordSize = pageSize;
+        numFramesPerRun = 4;
+        prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                minRecordSize, maxRecordSize, readerList, frameList, keyValueMap);
+
+        // Third batch: records twice the page size, forcing large frames.
+        minRecordSize = pageSize * 2;
+        maxRecordSize = pageSize * 2;
+        numFramesPerRun = 6;
+        prepareRandomInputRunList(ctx, pageSize, numRuns, numFramesPerRun,
+                minRecordSize, maxRecordSize, readerList, frameList, keyValueMap);
+
+        RunMergingFrameReader reader = new RunMergingFrameReader(ctx, readerList, frameList, SortFields,
+                Comparators,
+                null,
+                RecordDesc);
+        testMergeSucceed(ctx, reader, keyValueMap);
+    }
+
+    /**
+     * End-to-end: generate real run files via ExternalSortRunGenerator (two data
+     * batches with different record sizes), then merge them back and verify the
+     * output is sorted and complete.
+     */
+    @Test
+    public void testRunFileReader() throws HyracksDataException {
+        int pageSize = 128;
+        int numRuns = 4;
+        int numFramesPerRun = 4;
+        int minRecordSize = pageSize / 10;
+        int maxRecordSize = pageSize / 2;
+
+        IHyracksTaskContext ctx = testUtils.create(pageSize);
+        ExternalSortRunGenerator runGenerator = new ExternalSortRunGenerator(ctx, SortFields,
+                null, ComparatorFactories, RecordDesc, Algorithm.MERGE_SORT,
+                numFramesPerRun);
+
+        runGenerator.open();
+        Map<Integer, String> keyValuePair = new HashMap<>();
+        List<IFrame> frameList = new ArrayList<>();
+        prepareData(ctx, frameList, pageSize * numFramesPerRun * numRuns, minRecordSize, maxRecordSize,
+                null, keyValuePair);
+        for (IFrame frame : frameList) {
+            runGenerator.nextFrame(frame.getBuffer());
+        }
+
+        // Second batch with page-sized records; keyValuePair accumulates both.
+        numFramesPerRun = 2;
+        minRecordSize = pageSize;
+        maxRecordSize = pageSize;
+        frameList.clear();
+        prepareData(ctx, frameList, pageSize * numFramesPerRun * numRuns, minRecordSize, maxRecordSize,
+                null, keyValuePair);
+        for (IFrame frame : frameList) {
+            runGenerator.nextFrame(frame.getBuffer());
+        }
+
+        runGenerator.close();
+        // Size each input frame to the largest frame recorded for its run.
+        List<IFrame> inFrame = new ArrayList<>(runGenerator.getRuns().size());
+        for (RunAndMaxFrameSizePair max : runGenerator.getRuns()) {
+            inFrame.add(new GroupVSizeFrame(ctx, max.maxFrameSize));
+        }
+        matchResult(ctx, runGenerator.getRuns(), keyValuePair);
+        List<IFrameReader> runs = new ArrayList<>();
+        for (RunAndMaxFrameSizePair run : runGenerator.getRuns()) {
+            runs.add(run.run);
+        }
+        RunMergingFrameReader reader = new RunMergingFrameReader(ctx, runs, inFrame, SortFields, Comparators, null,
+                RecordDesc);
+
+        IFrame outFrame = new VSizeFrame(ctx);
+        reader.open();
+        while (reader.nextFrame(outFrame)) {
+            assertFrameIsSorted(outFrame, Arrays.asList(keyValuePair));
+        }
+        reader.close();
+        assertAllKeyValueIsConsumed(Arrays.asList(keyValuePair));
+    }
+
+    // Every expected record must have been matched (and removed) by the checks.
+    private void assertAllKeyValueIsConsumed(List<Map<Integer, String>> keyValueMapList) {
+        for (Map<Integer, String> map : keyValueMapList) {
+            assertTrue(map.isEmpty());
+        }
+    }
+
+    /**
+     * Asserts the frame's tuples are in non-decreasing key order and that each
+     * (key, value) pair exists in exactly one expected map, removing it as it
+     * is matched so duplicates would be caught.
+     */
+    private void assertFrameIsSorted(IFrame frame, List<Map<Integer, String>> keyValueMapList)
+            throws HyracksDataException {
+        FrameTupleAccessor fta = new FrameTupleAccessor(RecordDesc);
+
+        ByteBufferInputStream bbis = new ByteBufferInputStream();
+        DataInputStream di = new DataInputStream(bbis);
+
+        fta.reset(frame.getBuffer());
+        //        fta.prettyPrint();
+        int preKey = Integer.MIN_VALUE;
+        for (int i = 0; i < fta.getTupleCount(); i++) {
+            bbis.setByteBuffer(fta.getBuffer(),
+                    fta.getTupleStartOffset(i) + fta.getFieldStartOffset(i, 0) + fta.getFieldSlotsLength());
+            int key = (int) RecordDesc.getFields()[0].deserialize(di);
+            bbis.setByteBuffer(fta.getBuffer(),
+                    fta.getTupleStartOffset(i) + fta.getFieldStartOffset(i, 1) + fta.getFieldSlotsLength());
+            String value = (String) RecordDesc.getFields()[1].deserialize(di);
+
+            boolean found = false;
+            for (Map<Integer, String> map : keyValueMapList) {
+                if (map.containsKey(key) && map.get(key).equals(value)) {
+                    found = true;
+                    map.remove(key);
+                    break;
+                }
+            }
+            assertTrue(found);
+            assertTrue(preKey <= key);
+            preKey = key;
+        }
+    }
+
+    /**
+     * Creates numRuns TestFrameReaders plus a matching input frame each, and
+     * exposes each reader's record map. Note: the maps are empty until the
+     * merger calls open() on the readers, and the frames are sized from the
+     * pre-open maxFrameSize (one page).
+     */
+    static void prepareRandomInputRunList(IHyracksTaskContext ctx, int pageSize, int numRuns,
+            int numFramesPerRun, int minRecordSize, int maxRecordSize,
+            List<TestFrameReader> readerList, List<IFrame> frameList, List<Map<Integer, String>> keyValueMap)
+            throws HyracksDataException {
+        for (int i = 0; i < numRuns; i++) {
+            readerList.add(new TestFrameReader(pageSize, numFramesPerRun, minRecordSize, maxRecordSize));
+            frameList.add(new VSizeFrame(ctx, readerList.get(readerList.size() - 1).maxFrameSize));
+            keyValueMap.add(readerList.get(readerList.size() - 1).result);
+        }
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb-hyracks/blob/0d87a57f/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/TopKRunGeneratorTest.java
----------------------------------------------------------------------
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/TopKRunGeneratorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/TopKRunGeneratorTest.java
new file mode 100644
index 0000000..ae0397b
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/unit/TopKRunGeneratorTest.java
@@ -0,0 +1,208 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ *  Licensed under the Apache License, Version 2.0 (the "License");
+ *  you may not use this file except in compliance with the License.
+ *  you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.unit;
+
+import static edu.uci.ics.hyracks.tests.unit.AbstractRunGeneratorTest.ComparatorFactories;
+import static edu.uci.ics.hyracks.tests.unit.AbstractRunGeneratorTest.RecordDesc;
+import static edu.uci.ics.hyracks.tests.unit.AbstractRunGeneratorTest.SerDers;
+import static edu.uci.ics.hyracks.tests.unit.AbstractRunGeneratorTest.SortFields;
+import static edu.uci.ics.hyracks.tests.unit.AbstractRunGeneratorTest.assertFTADataIsSorted;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.FixedSizeFrame;
+import edu.uci.ics.hyracks.api.comm.IFrame;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.std.sort.AbstractSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.HybridTopKSortRunGenerator;
+import edu.uci.ics.hyracks.dataflow.std.sort.HeapSortRunGenerator;
+
+/**
+ * Tests for the top-K sort run generators: when the input fits in memory,
+ * no run files should be generated and the sorter's in-memory output must be
+ * exactly the K smallest keys, in order.
+ */
+public class TopKRunGeneratorTest {
+
+    static final int PAGE_SIZE = 512;
+    static final int NUM_PAGES = 80;
+    static final int SORT_FRAME_LIMIT = 4;
+
+    // Key order of the generated input data.
+    enum ORDER {
+        INORDER,
+        REVERSE
+    }
+
+    /**
+     * IFrameWriter that checks the frames it receives are sorted and match
+     * {@code answer} exactly; close() requires every answer to be consumed.
+     */
+    public class InMemorySortDataValidator implements IFrameWriter {
+
+        InMemorySortDataValidator(Map<Integer, String> answer) {
+            this.answer = answer;
+        }
+
+        Map<Integer, String> answer;
+        FrameTupleAccessor accessor;
+        // Last key seen, carried across frames to verify global order.
+        int preKey = Integer.MIN_VALUE;
+
+        @Override
+        public void open() throws HyracksDataException {
+            accessor = new FrameTupleAccessor(RecordDesc);
+            preKey = Integer.MIN_VALUE;
+        }
+
+        @Override
+        public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+            accessor.reset(buffer);
+            // assertFTADataIsSorted removes matched entries from answer.
+            preKey = assertFTADataIsSorted(accessor, answer, preKey);
+        }
+
+        @Override
+        public void fail() throws HyracksDataException {
+
+        }
+
+        @Override
+        public void close() throws HyracksDataException {
+            assertTrue(answer.isEmpty());
+        }
+    }
+
+    @Test
+    public void testReverseOrderedDataShouldNotGenerateAnyRuns() throws HyracksDataException {
+        int topK = 1;
+        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
+        HeapSortRunGenerator sorter = new HeapSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
+                SortFields, null, ComparatorFactories, RecordDesc);
+
+        testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
+    }
+
+    @Test
+    public void testAlreadySortedDataShouldNotGenerateAnyRuns() throws HyracksDataException {
+        int topK = SORT_FRAME_LIMIT;
+        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
+        HeapSortRunGenerator sorter = new HeapSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
+                SortFields, null, ComparatorFactories, RecordDesc);
+
+        testInMemoryOnly(ctx, topK, ORDER.INORDER, sorter);
+    }
+
+    @Test
+    public void testHybridTopKShouldNotGenerateAnyRuns() throws HyracksDataException {
+        int topK = 1;
+        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
+        AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
+                SortFields, null, ComparatorFactories, RecordDesc);
+
+        testInMemoryOnly(ctx, topK, ORDER.REVERSE, sorter);
+    }
+
+    // NOTE(review): this test is incomplete — it constructs the sorter but never
+    // feeds it data and makes no assertions, so it passes vacuously. The name
+    // suggests it should flush past memory and verify the switch to the frame
+    // sorter; TODO implement or remove.
+    @Test
+    public void testHybridTopKShouldSwitchToFrameSorterWhenFlushed() {
+        int topK = 1;
+        IHyracksTaskContext ctx = AbstractRunGeneratorTest.testUtils.create(PAGE_SIZE);
+        AbstractSortRunGenerator sorter = new HybridTopKSortRunGenerator(ctx, SORT_FRAME_LIMIT, topK,
+                SortFields, null, ComparatorFactories, RecordDesc);
+
+    }
+
+    /**
+     * Feeds one buffer of sorted (or reverse-sorted) data to the sorter and
+     * asserts that no run files are produced and that the in-memory result is
+     * exactly the top-K answer.
+     */
+    private void testInMemoryOnly(IHyracksTaskContext ctx, int topK, ORDER order, AbstractSortRunGenerator sorter)
+            throws HyracksDataException {
+        // The TreeMap's comparator controls the physical order of the input data.
+        Map<Integer, String> keyValuePair = null;
+        switch (order) {
+            case INORDER:
+                keyValuePair = new TreeMap<>();
+                break;
+            case REVERSE:
+                keyValuePair = new TreeMap<>(Collections.reverseOrder());
+                break;
+        }
+
+        List<IFrame> frameList = new ArrayList<>();
+        // 4/5 of the buffer keeps some slack; presumably so that all records fit
+        // in the single PAGE_SIZE * NUM_PAGES buffer below — TODO confirm.
+        int minDataSize = PAGE_SIZE * NUM_PAGES * 4 / 5;
+        int minRecordSize = 16;
+        int maxRecordSize = 64;
+
+        AbstractRunGeneratorTest
+                .prepareData(ctx, frameList, minDataSize, minRecordSize, maxRecordSize, null, keyValuePair);
+
+        assert topK > 0;
+
+        ByteBuffer buffer = prepareSortedData(keyValuePair);
+
+        Map<Integer, String> topKAnswer = getTopKAnswer(keyValuePair, topK);
+
+        doSort(sorter, buffer);
+
+        // In-memory case: the sorter must not have spilled any runs.
+        assertEquals(0, sorter.getRuns().size());
+        validateResult(sorter, topKAnswer);
+    }
+
+    // Flushes the sorter's in-memory data through a validator that checks it
+    // equals topKAnswer, sorted.
+    private void validateResult(AbstractSortRunGenerator sorter, Map<Integer, String> topKAnswer)
+            throws HyracksDataException {
+
+        InMemorySortDataValidator validator = new InMemorySortDataValidator(topKAnswer);
+        validator.open();
+        sorter.getSorter().flush(validator);
+        validator.close();
+    }
+
+    // Runs the full open/nextFrame/close lifecycle over a single input buffer.
+    private void doSort(AbstractSortRunGenerator sorter, ByteBuffer buffer) throws HyracksDataException {
+
+        sorter.open();
+        sorter.nextFrame(buffer);
+        sorter.close();
+    }
+
+    /** Returns the topK smallest-key entries (natural key order) of the input. */
+    private Map<Integer, String> getTopKAnswer(Map<Integer, String> keyValuePair, int topK) {
+
+        // Re-sort ascending regardless of the input map's iteration order.
+        TreeMap<Integer, String> copy = new TreeMap<>(keyValuePair);
+
+        Map<Integer, String> answer = new TreeMap<>();
+        for (Map.Entry<Integer, String> entry : copy.entrySet()) {
+            if (answer.size() < topK) {
+                answer.put(entry.getKey(), entry.getValue());
+            } else {
+                break;
+            }
+        }
+        return answer;
+    }
+
+    /**
+     * Serializes keyValuePair (in the map's iteration order) into one fixed-size
+     * frame. NOTE(review): appender.append's return value is ignored — records
+     * are silently dropped if the buffer fills; relies on minDataSize leaving
+     * enough slack above — confirm.
+     */
+    private ByteBuffer prepareSortedData(Map<Integer, String> keyValuePair) throws HyracksDataException {
+        ByteBuffer buffer = ByteBuffer.allocate(PAGE_SIZE * NUM_PAGES);
+        IFrame inputFrame = new FixedSizeFrame(buffer);
+        FrameTupleAppender appender = new FrameTupleAppender();
+        appender.reset(inputFrame, true);
+        ArrayTupleBuilder builder = new ArrayTupleBuilder(RecordDesc.getFieldCount());
+
+        for (Map.Entry<Integer, String> entry : keyValuePair.entrySet()) {
+            builder.reset();
+            builder.addField(SerDers[0], entry.getKey());
+            builder.addField(SerDers[1], entry.getValue());
+            appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize());
+        }
+        return buffer;
+    }
+}


Mime
View raw message