flink-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ches...@apache.org
Subject [4/4] flink git commit: [FLINK-6659] fix RocksDBMergeIteratorTest leaving temporary data behind
Date Thu, 25 May 2017 11:06:41 GMT
[FLINK-6659] fix RocksDBMergeIteratorTest leaving temporary data behind

-> use a JUnit '@Rule' that does the cleanup


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/0824333e
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/0824333e
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/0824333e

Branch: refs/heads/release-1.3
Commit: 0824333ead39ed383be10e5a1fe13440dbd5fa88
Parents: 0f86dee
Author: Nico Kruber <nico@data-artisans.com>
Authored: Mon May 22 16:47:35 2017 +0200
Committer: zentol <chesnay@apache.org>
Committed: Wed May 24 16:52:02 2017 +0200

----------------------------------------------------------------------
 .../state/RocksDBMergeIteratorTest.java          | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/0824333e/flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBMergeIteratorTest.java
----------------------------------------------------------------------
diff --git a/flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBMergeIteratorTest.java b/flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBMergeIteratorTest.java
index 956ef2f..f5bcf86 100644
--- a/flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBMergeIteratorTest.java
+++ b/flink-contrib/flink-statebackend-rocksdb/src/test/java/org/apache/flink/contrib/streaming/state/RocksDBMergeIteratorTest.java
@@ -21,16 +21,16 @@ package org.apache.flink.contrib.streaming.state;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.configuration.ConfigConstants;
 import org.apache.flink.core.memory.ByteArrayOutputStreamWithPos;
-import org.apache.flink.runtime.testutils.CommonTestUtils;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;
 import org.rocksdb.RocksDB;
 import org.rocksdb.RocksIterator;
 
 import java.io.DataOutputStream;
-import java.io.File;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -43,6 +43,9 @@ public class RocksDBMergeIteratorTest {
 	private static final int NUM_KEY_VAL_STATES = 50;
 	private static final int MAX_NUM_KEYS = 20;
 
+	@Rule
+	public TemporaryFolder tempFolder = new TemporaryFolder();
+
 	@Test
 	public void testEmptyMergeIterator() throws IOException {
 		RocksDBKeyedStateBackend.RocksDBMergeIterator emptyIterator =
@@ -51,19 +54,23 @@ public class RocksDBMergeIteratorTest {
 	}
 
 	@Test
-	public void testMergeIterator() throws Exception {
+	public void testMergeIteratorByte() throws Exception {
 		Assert.assertTrue(MAX_NUM_KEYS <= Byte.MAX_VALUE);
 
 		testMergeIterator(Byte.MAX_VALUE);
+	}
+
 	@Test
+	public void testMergeIteratorShort() throws Exception {
+		Assert.assertTrue(MAX_NUM_KEYS <= Short.MAX_VALUE);
+
 		testMergeIterator(Short.MAX_VALUE);
 	}
 
 	public void testMergeIterator(int maxParallelism) throws Exception {
 		Random random = new Random(1234);
 
-		File tmpDir = CommonTestUtils.createTempDirectory();
-
-		RocksDB rocksDB = RocksDB.open(tmpDir.getAbsolutePath());
+		RocksDB rocksDB = RocksDB.open(tempFolder.getRoot().getAbsolutePath());
 		try {
 			List<Tuple2<RocksIterator, Integer>> rocksIteratorsWithKVStateId = new ArrayList<>();
 			List<Tuple2<ColumnFamilyHandle, Integer>> columnFamilyHandlesWithKeyCount = new ArrayList<>();


Mime
View raw message