flink-commits mailing list archives

From se...@apache.org
Subject [1/2] flink git commit: [FLINK-1649] [runtime] Give a good error message when a user emits an unsupported null value
Date Thu, 05 Mar 2015 13:08:36 GMT
Repository: flink
Updated Branches:
  refs/heads/master bcb3daf03 -> eae2166dd


[FLINK-1649] [runtime] Give a good error message when a user emits an unsupported null value

This closes #456
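
An illustrative sketch (not part of this commit): with this change, a user function
that returns null as a whole record fails fast in OutputCollector.collect() with the
NullPointerException added below, instead of an obscure failure deeper in the runtime.
The types MaybeValue and WrapNulls are hypothetical and only sketch the pattern the new
message points to, namely carrying a nullable value as a field inside another object
rather than emitting null as the record itself:

    // Hypothetical wrapper type: a null *field* inside an object is allowed.
    public static class MaybeValue {
        public String value;                        // may be null
        public MaybeValue() {}
        public MaybeValue(String value) { this.value = value; }
    }

    // Hypothetical user function: wrap the nullable value instead of returning null.
    public static class WrapNulls implements MapFunction<String, MaybeValue> {
        @Override
        public MaybeValue map(String s) {
            return new MaybeValue(s.isEmpty() ? null : s);  // never return null itself
        }
    }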


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/482766e9
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/482766e9
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/482766e9

Branch: refs/heads/master
Commit: 482766e949d69e282ed862bd97f2a8378b2f699e
Parents: bcb3daf
Author: Stephan Ewen <sewen@apache.org>
Authored: Wed Mar 4 20:36:33 2015 +0100
Committer: Stephan Ewen <sewen@apache.org>
Committed: Thu Mar 5 13:26:55 2015 +0100

----------------------------------------------------------------------
 .../operators/shipping/OutputCollector.java     | 61 ++++++---------
 .../flink/test/misc/NullValuesITCase.java       | 82 ++++++++++++++++++++
 2 files changed, 104 insertions(+), 39 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/482766e9/flink-runtime/src/main/java/org/apache/flink/runtime/operators/shipping/OutputCollector.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/shipping/OutputCollector.java b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/shipping/OutputCollector.java
index 56723df..3526e96 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/operators/shipping/OutputCollector.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/operators/shipping/OutputCollector.java
@@ -16,7 +16,6 @@
  * limitations under the License.
  */
 
-
 package org.apache.flink.runtime.operators.shipping;
 
 import java.io.IOException;
@@ -30,13 +29,13 @@ import org.apache.flink.runtime.plugable.SerializationDelegate;
 import org.apache.flink.util.Collector;
 
 /**
- * The OutputCollector collects records, and emits the pair to a set of Nephele {@link RecordWriter}s.
+ * The OutputCollector collects records, and emits them to the {@link RecordWriter}s.
  * The OutputCollector tracks to which writers a deep-copy must be given and which not.
  */
-public class OutputCollector<T> implements Collector<T>
-{	
+public class OutputCollector<T> implements Collector<T> {
+
 	// list of writers
-	protected RecordWriter<SerializationDelegate<T>>[] writers;
+	private final RecordWriter<SerializationDelegate<T>>[] writers;
 
 	private final SerializationDelegate<T> delegate;
 
@@ -49,50 +48,33 @@ public class OutputCollector<T> implements Collector<T>
 	 * @param writers List of all writers.
 	 */
 	@SuppressWarnings("unchecked")
-	public OutputCollector(List<RecordWriter<SerializationDelegate<T>>> writers, TypeSerializer<T> serializer)
-	{
+	public OutputCollector(List<RecordWriter<SerializationDelegate<T>>> writers, TypeSerializer<T> serializer) {
 		this.delegate = new SerializationDelegate<T>(serializer);
+		this.writers = (RecordWriter<SerializationDelegate<T>>[]) writers.toArray(new RecordWriter[writers.size()]);
 	}
-	
-	/**
-	 * Adds a writer to the OutputCollector.
-	 * 
-	 * @param writer The writer to add.
-	 */
-
-	@SuppressWarnings("unchecked")
-	public void addWriter(RecordWriter<SerializationDelegate<T>> writer)
-	{
-		// avoid using the array-list here to reduce one level of object indirection
-		if (this.writers == null) {
-			this.writers = new RecordWriter[] {writer};
-		}
-		else {
-			RecordWriter<SerializationDelegate<T>>[] ws = new RecordWriter[this.writers.length + 1];
-			System.arraycopy(this.writers, 0, ws, 0, this.writers.length);
-			ws[this.writers.length] = writer;
-			this.writers = ws;
-		}
-	}
 
 	/**
 	 * Collects a record and emits it to all writers.
 	 */
 	@Override
-	public void collect(T record)
-	{
-		this.delegate.setInstance(record);
-		try {
-			for (int i = 0; i < writers.length; i++) {
-				this.writers[i].emit(this.delegate);
+	public void collect(T record)  {
+		if (record != null) {
+			this.delegate.setInstance(record);
+			try {
+				for (RecordWriter<SerializationDelegate<T>> writer : writers) {
+					writer.emit(this.delegate);
+				}
+			}
+			catch (IOException e) {
+				throw new RuntimeException("Emitting the record caused an I/O exception: " + e.getMessage(),
e);
+			}
+			catch (InterruptedException e) {
+				throw new RuntimeException("Emitting the record was interrupted: " + e.getMessage(),
e);
 			}
 		}
-		catch (IOException e) {
-			throw new RuntimeException("Emitting the record caused an I/O exception: " + e.getMessage(),
e);
-		}
-		catch (InterruptedException e) {
-			throw new RuntimeException("Emitting the record was interrupted: " + e.getMessage(), e);
+		else {
+			throw new NullPointerException("The system does not support records that are null."
+								+ "Null values are only supported as fields inside other objects.");
 		}
 	}
 
@@ -111,6 +93,7 @@ public class OutputCollector<T> implements Collector<T>
 	 * List of writers that are associated with this output collector
 	 * @return list of writers
 	 */
+	@SuppressWarnings("unchecked")
 	public List<RecordWriter<SerializationDelegate<T>>> getWriters() {
 		return Collections.unmodifiableList(Arrays.asList(this.writers));
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/482766e9/flink-tests/src/test/java/org/apache/flink/test/misc/NullValuesITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/misc/NullValuesITCase.java b/flink-tests/src/test/java/org/apache/flink/test/misc/NullValuesITCase.java
new file mode 100644
index 0000000..2087b63
--- /dev/null
+++ b/flink-tests/src/test/java/org/apache/flink/test/misc/NullValuesITCase.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.test.misc;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.ExecutionEnvironment;
+import org.apache.flink.client.program.ProgramInvocationException;
+import org.apache.flink.configuration.ConfigConstants;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.core.fs.FileSystem;
+import org.apache.flink.test.util.ForkableFlinkMiniCluster;
+
+import org.junit.Test;
+
+import static org.junit.Assert.*;
+
+/**
+ * Tests how the system behaves when null records are passed through the system.
+ */
+@SuppressWarnings("serial")
+public class NullValuesITCase {
+
+	@Test
+	public void testNullValues() {
+		ForkableFlinkMiniCluster cluster = null;
+		try {
+			Configuration config = new Configuration();
+			config.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 7);
+			cluster = new ForkableFlinkMiniCluster(config, false);
+
+			ExecutionEnvironment env =
+					ExecutionEnvironment.createRemoteEnvironment("localhost", cluster.getJobManagerRPCPort());
+
+			env.setDegreeOfParallelism(1);
+
+			DataSet<String> data = env.fromElements("hallo")
+					.map(new MapFunction<String, String>() {
+						@Override
+						public String map(String value) throws Exception {
+							return null;
+						}
+					});
+			data.writeAsText("/tmp/myTest", FileSystem.WriteMode.OVERWRITE);
+
+			try {
+				env.execute();
+				fail("this should fail due to null values.");
+			}
+			catch (ProgramInvocationException e) {
+				assertNotNull(e.getCause());
+				assertNotNull(e.getCause().getCause());
+				assertTrue(e.getCause().getCause() instanceof NullPointerException);
+			}
+		}
+		catch (Exception e) {
+			e.printStackTrace();
+			fail(e.getMessage());
+		}
+		finally {
+			if (cluster != null) {
+				cluster.shutdown();
+			}
+		}
+	}
+}

