flink-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hsapu...@apache.org
Subject [1/2] flink git commit: Small cleanups to add space between if-else keyword and parentheses to be consistent as I found them.
Date Fri, 13 Feb 2015 17:14:30 GMT
Repository: flink
Updated Branches:
  refs/heads/master 0a22b71c8 -> 69cba1f15


Small cleanups to add space between if-else keyword and parentheses to be consistent
as I found them.

Also fix some comment typos.


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/12d4802c
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/12d4802c
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/12d4802c

Branch: refs/heads/master
Commit: 12d4802c2e165ed951b77efbe82bee9c72d4c873
Parents: 6bec228
Author: Henry Saputra <henry.saputra@gmail.com>
Authored: Fri Feb 13 09:09:26 2015 -0800
Committer: Henry Saputra <henry.saputra@gmail.com>
Committed: Fri Feb 13 09:09:26 2015 -0800

----------------------------------------------------------------------
 .../org/apache/flink/compiler/plan/PlanNode.java  |  6 +++---
 .../java/org/apache/flink/core/fs/FileSystem.java | 14 +++++++-------
 .../main/java/org/apache/flink/types/Record.java  | 18 +++++++++---------
 .../flink/api/java/io/TextInputFormatTest.java    |  2 +-
 .../api/java/record/io/TextInputFormatTest.java   |  6 +++---
 .../runtime/KryoVersusAvroMinibenchmark.java      | 14 +++++++-------
 .../runtime/accumulators/AccumulatorEvent.java    |  4 ++--
 .../org/apache/flink/runtime/blob/BlobServer.java |  8 ++++----
 .../flink/runtime/executiongraph/Execution.java   |  4 ++--
 .../executiongraph/ExecutionJobVertex.java        |  2 +-
 .../partition/IntermediateResultPartition.java    |  7 +++++--
 .../scheduler/NoResourceAvailableException.java   |  4 ++--
 .../jobmanager/web/JobManagerInfoServlet.java     | 16 ++++++++--------
 .../flink/runtime/util/DataInputDeserializer.java |  4 ++--
 .../flink/runtime/jobmanager/JobManager.scala     |  2 +-
 .../minicluster/LocalFlinkMiniCluster.scala       |  4 ++--
 .../flink/runtime/taskmanager/TaskManager.scala   |  2 +-
 .../runtime/taskmanager/TaskManagerTest.java      |  4 ++--
 .../streaming/util/TestStreamEnvironment.java     |  8 ++++----
 .../test/recordJobs/relational/TPCHQuery1.java    |  5 ++---
 20 files changed, 68 insertions(+), 66 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
----------------------------------------------------------------------
diff --git a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
index f2375fd..4f72144 100644
--- a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
+++ b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
@@ -248,7 +248,7 @@ public abstract class PlanNode implements Visitable<PlanNode>, DumpableNode<Plan
 	}
 
 	public Costs getCumulativeCostsShare() {
-		if (this.cumulativeCosts == null){
+		if (this.cumulativeCosts == null) {
 			return null;
 		} else {
 			Costs result = cumulativeCosts.clone();
@@ -398,9 +398,9 @@ public abstract class PlanNode implements Visitable<PlanNode>, DumpableNode<Plan
 	}
 
 	public PlanNode getCandidateAtBranchPoint(OptimizerNode branchPoint) {
-		if(branchPlan == null){
+		if (branchPlan == null) {
 			return null;
-		}else{
+		} else {
 			return this.branchPlan.get(branchPoint);
 		}
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java b/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
index 7e76b30..3cfb75f 100644
--- a/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
+++ b/flink-core/src/main/java/org/apache/flink/core/fs/FileSystem.java
@@ -228,7 +228,7 @@ public abstract class FileSystem {
 			if (!FSDIRECTORY.containsKey(uri.getScheme())) {
 				// no build in support for this file system. Falling back to Hadoop's FileSystem impl.
 				Class<?> wrapperClass = getHadoopWrapperClassNameForFileSystem(uri.getScheme());
-				if(wrapperClass != null) {
+				if (wrapperClass != null) {
 					// hadoop has support for the FileSystem
 					FSKey wrappedKey = new FSKey(HADOOP_WRAPPER_SCHEME + "+" + uri.getScheme(), uri.getAuthority());
 					if (CACHE.containsKey(wrappedKey)) {
@@ -249,7 +249,7 @@ public abstract class FileSystem {
 			} else {
 				// we end up here if we have a file system with build-in flink support.
 				String fsClass = FSDIRECTORY.get(uri.getScheme());
-				if(fsClass.equals(HADOOP_WRAPPER_FILESYSTEM_CLASS)) {
+				if (fsClass.equals(HADOOP_WRAPPER_FILESYSTEM_CLASS)) {
 					fs = instantiateHadoopFileSystemWrapper(null);
 				} else {
 					fs = instantiateFileSystem(fsClass);
@@ -303,7 +303,7 @@ public abstract class FileSystem {
 	private static HadoopFileSystemWrapper hadoopWrapper;
 
 	private static Class<?> getHadoopWrapperClassNameForFileSystem(String scheme) {
-		if(hadoopWrapper == null) {
+		if (hadoopWrapper == null) {
 			try {
 				hadoopWrapper = (HadoopFileSystemWrapper) instantiateHadoopFileSystemWrapper(null);
 			} catch (IOException e) {
@@ -664,12 +664,12 @@ public abstract class FileSystem {
 	 * @throws IOException
 	 */
 	public boolean initOutPathDistFS(Path outPath, WriteMode writeMode, boolean createDirectory)
throws IOException {
-		if(!this.isDistributedFS()) {
+		if (!this.isDistributedFS()) {
 			return false;
 		}
 		
 		// check if path exists
-		if(this.exists(outPath)) {
+		if (this.exists(outPath)) {
 			// path exists, check write mode
 			switch(writeMode) {
 			case NO_OVERWRITE:
@@ -692,10 +692,10 @@ public abstract class FileSystem {
 			}
 		}
 		
-		if(createDirectory) {
+		if (createDirectory) {
 			// Output directory needs to be created
 			try {
-				if(!this.exists(outPath)) {
+				if (!this.exists(outPath)) {
 					this.mkdirs(outPath);
 				}
 			} catch(IOException ioe) {

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-core/src/main/java/org/apache/flink/types/Record.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/types/Record.java b/flink-core/src/main/java/org/apache/flink/types/Record.java
index 9899666..218d6ce 100644
--- a/flink-core/src/main/java/org/apache/flink/types/Record.java
+++ b/flink-core/src/main/java/org/apache/flink/types/Record.java
@@ -1519,7 +1519,7 @@ public final class Record implements Value, CopyableValue<Record>
{
 
 		@Override
 		public void skipBytesToRead(int numBytes) throws IOException {
-			if(this.end - this.position < numBytes) {
+			if (this.end - this.position < numBytes) {
 				throw new EOFException("Could not skip " + numBytes + ".");
 			}
 			skipBytes(numBytes);
@@ -1527,26 +1527,26 @@ public final class Record implements Value, CopyableValue<Record>
{
 
 		@Override
 		public int read(byte[] b, int off, int len) throws IOException {
-			if(b == null){
+			if (b == null) {
 				throw new NullPointerException("Byte array b cannot be null.");
 			}
 
-			if(off < 0){
+			if (off < 0) {
 				throw new IndexOutOfBoundsException("Offset cannot be negative.");
 			}
 
-			if(len < 0){
+			if (len < 0) {
 				throw new IndexOutOfBoundsException("Length cannot be negative.");
 			}
 
-			if(b.length - off < len){
+			if (b.length - off < len) {
 				throw new IndexOutOfBoundsException("Byte array does not provide enough space to store
requested data" +
 						".");
 			}
 
-			if(this.position >= this.end){
+			if (this.position >= this.end) {
 				return -1;
-			}else{
+			} else {
 				int toRead = Math.min(this.end-this.position, len);
 				System.arraycopy(this.memory,this.position,b,off,toRead);
 				this.position += toRead;
@@ -1790,14 +1790,14 @@ public final class Record implements Value, CopyableValue<Record>
{
 		public void skipBytesToWrite(int numBytes) throws IOException {
 			int skippedBytes = skipBytes(numBytes);
 
-			if(skippedBytes != numBytes){
+			if (skippedBytes != numBytes) {
 				throw new EOFException("Could not skip " + numBytes + " bytes.");
 			}
 		}
 
 		@Override
 		public void write(DataInputView source, int numBytes) throws IOException {
-			if(numBytes > this.end - this.position){
+			if (numBytes > this.end - this.position) {
 				throw new IOException("Could not write " + numBytes + " bytes since the buffer is full.");
 			}
 

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-java/src/test/java/org/apache/flink/api/java/io/TextInputFormatTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/io/TextInputFormatTest.java
b/flink-java/src/test/java/org/apache/flink/api/java/io/TextInputFormatTest.java
index 42e4d69..62a7cf5 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/io/TextInputFormatTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/io/TextInputFormatTest.java
@@ -142,7 +142,7 @@ public class TextInputFormatTest {
 				result = inputFormat.nextRecord(result);
 				assertNull("The input file is over", result);
 				
-			}else{
+			} else {
 				result = inputFormat.nextRecord("");
 				assertNotNull("Expecting first record here", result);
 				assertEquals(CONTENT, result);

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-java/src/test/java/org/apache/flink/api/java/record/io/TextInputFormatTest.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/record/io/TextInputFormatTest.java
b/flink-java/src/test/java/org/apache/flink/api/java/record/io/TextInputFormatTest.java
index 12e8a61..8ca19cf 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/record/io/TextInputFormatTest.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/record/io/TextInputFormatTest.java
@@ -102,7 +102,7 @@ public class TextInputFormatTest {
 	}
 
 	private void testRemovingTrailingCR(String lineBreaker,String delimiter) {
-		File tempFile=null;
+		File tempFile;
 		
 		String FIRST = "First line";
 		String SECOND = "Second line";
@@ -131,7 +131,7 @@ public class TextInputFormatTest {
 			inputFormat.open(splits[0]);
 			
 			Record r = new Record();
-			if (  (delimiter.equals("\n") && (lineBreaker.equals("\n") || lineBreaker.equals("\r\n")
) ) 
+			if ( (delimiter.equals("\n") && (lineBreaker.equals("\n") || lineBreaker.equals("\r\n")
) )
 					|| (lineBreaker.equals(delimiter)) ){
 
 				assertNotNull("Expecting first record here", inputFormat.nextRecord(r));
@@ -141,7 +141,7 @@ public class TextInputFormatTest {
 				assertEquals(SECOND, r.getField(0, StringValue.class).getValue());
 				
 				assertNull("The input file is over", inputFormat.nextRecord(r));
-			}else{
+			} else {
 				assertNotNull("Expecting first record here", inputFormat.nextRecord(r));
 				assertEquals(CONTENT, r.getField(0, StringValue.class).getValue());
 			}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-java/src/test/java/org/apache/flink/api/java/typeutils/runtime/KryoVersusAvroMinibenchmark.java
----------------------------------------------------------------------
diff --git a/flink-java/src/test/java/org/apache/flink/api/java/typeutils/runtime/KryoVersusAvroMinibenchmark.java
b/flink-java/src/test/java/org/apache/flink/api/java/typeutils/runtime/KryoVersusAvroMinibenchmark.java
index b2ed219..7a8dbbf 100644
--- a/flink-java/src/test/java/org/apache/flink/api/java/typeutils/runtime/KryoVersusAvroMinibenchmark.java
+++ b/flink-java/src/test/java/org/apache/flink/api/java/typeutils/runtime/KryoVersusAvroMinibenchmark.java
@@ -477,33 +477,33 @@ public class KryoVersusAvroMinibenchmark {
 		public void skipBytesToRead(int numBytes) throws IOException {
 			int skippedBytes = skipBytes(numBytes);
 
-			if(skippedBytes < numBytes){
+			if (skippedBytes < numBytes){
 				throw new EOFException("Could not skip " + numBytes +" bytes.");
 			}
 		}
 
 		@Override
 		public int read(byte[] b, int off, int len) throws IOException {
-			if(b == null){
+			if (b == null) {
 				throw new NullPointerException("Byte array b cannot be null.");
 			}
 
-			if(off < 0){
+			if (off < 0) {
 				throw new IndexOutOfBoundsException("Offset cannot be negative.");
 			}
 
-			if(len < 0){
+			if (len < 0) {
 				throw new IndexOutOfBoundsException("Length cannot be negative.");
 			}
 
-			if(b.length - off < len){
+			if (b.length - off < len) {
 				throw new IndexOutOfBoundsException("Byte array does not provide enough space to store
requested data" +
 						".");
 			}
 
-			if(this.position >= this.end){
+			if (this.position >= this.end) {
 				return -1;
-			}else{
+			} else {
 				int toRead = Math.min(this.end-this.position, len);
 				System.arraycopy(this.buffer,this.position,b,off,toRead);
 				this.position += toRead;

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorEvent.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorEvent.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorEvent.java
index 3fed259..fd91d65 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorEvent.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/accumulators/AccumulatorEvent.java
@@ -170,9 +170,9 @@ public class AccumulatorEvent implements Serializable {
 			baos.close();
 
 			buffer = baos.toByteArray();
-		}else if(serializedData != null){
+		} else if(serializedData != null) {
 			buffer = serializedData;
-		}else{
+		} else {
 			throw new RuntimeException("The AccumulatorEvent's accumulator is null and there is "
+
 					"no serialized data attached to it.");
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java b/flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java
index 220d3a9..b27af03 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/blob/BlobServer.java
@@ -248,16 +248,16 @@ public final class BlobServer extends Thread implements BlobService
{
 	 */
 	@Override
 	public URL getURL(BlobKey requiredBlob) throws IOException {
-		if(requiredBlob == null){
+		if (requiredBlob == null) {
 			throw new IllegalArgumentException("Required BLOB cannot be null.");
 		}
 
 		final File localFile = BlobUtils.getStorageLocation(storageDir, requiredBlob);
 
-		if(!localFile.exists()){
+		if (!localFile.exists()) {
 			throw new FileNotFoundException("File " + localFile.getCanonicalPath() + " does " +
 					"not exist.");
-		}else{
+		} else {
 			return localFile.toURI().toURL();
 		}
 	}
@@ -273,7 +273,7 @@ public final class BlobServer extends Thread implements BlobService {
 	public void delete(BlobKey blobKey) throws IOException {
 		final File localFile = BlobUtils.getStorageLocation(storageDir, blobKey);
 
-		if(localFile.exists()){
+		if (localFile.exists()) {
 			localFile.delete();
 		}
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
index c4e5abf..e23194d 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/Execution.java
@@ -742,9 +742,9 @@ public class Execution implements Serializable {
 
 			@Override
 			public void onComplete(Throwable failure, Object success) throws Throwable {
-				if(failure != null){
+				if (failure != null) {
 					fail(new Exception("Task could not be canceled.", failure));
-				}else{
+				} else {
 					TaskOperationResult result = (TaskOperationResult)success;
 					if(!result.success()){
 						LOG.debug("Cancel task call did not find task. Probably akka message call" +

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionJobVertex.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionJobVertex.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionJobVertex.java
index 0a5a463..9df9bd5 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionJobVertex.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/executiongraph/ExecutionJobVertex.java
@@ -446,7 +446,7 @@ public class ExecutionJobVertex implements Serializable {
 					
 					// tell the graph
 					graph.jobVertexInFinalState(this);
-				}else{
+				} else {
 					numSubtasksInFinalState++;
 				}
 			}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/IntermediateResultPartition.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/IntermediateResultPartition.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/IntermediateResultPartition.java
index 7a987c1..71af7a6 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/IntermediateResultPartition.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/io/network/partition/IntermediateResultPartition.java
@@ -84,7 +84,9 @@ public class IntermediateResultPartition implements BufferPoolOwner {
 
 	private BufferPool bufferPool;
 
-	public IntermediateResultPartition(RuntimeEnvironment environment, int partitionIndex, JobID
jobId, ExecutionAttemptID executionId, IntermediateResultPartitionID partitionId, IntermediateResultPartitionType
partitionType, IntermediateResultPartitionQueue[] partitionQueues, NetworkEnvironment networkEnvironment)
{
+	public IntermediateResultPartition(RuntimeEnvironment environment, int partitionIndex, JobID
jobId,
+			ExecutionAttemptID executionId, IntermediateResultPartitionID partitionId, IntermediateResultPartitionType
partitionType,
+			IntermediateResultPartitionQueue[] partitionQueues, NetworkEnvironment networkEnvironment)
{
 		this.environment = environment;
 		this.partitionIndex = partitionIndex;
 		this.jobId = jobId;
@@ -100,7 +102,8 @@ public class IntermediateResultPartition implements BufferPoolOwner {
 	// ------------------------------------------------------------------------
 
 	public void setBufferPool(BufferPool bufferPool) {
-		checkArgument(bufferPool.getNumberOfRequiredMemorySegments() == getNumberOfQueues(), "Buffer
pool has not enough buffers for this intermediate result.");
+		checkArgument(bufferPool.getNumberOfRequiredMemorySegments() == getNumberOfQueues(),
+				"Buffer pool has not enough buffers for this intermediate result.");
 		checkState(this.bufferPool == null, "Buffer pool has already been set for intermediate
result partition.");
 
 		this.bufferPool = checkNotNull(bufferPool);

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/NoResourceAvailableException.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/NoResourceAvailableException.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/NoResourceAvailableException.java
index ebf37d9..e5cfebb 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/NoResourceAvailableException.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/scheduler/NoResourceAvailableException.java
@@ -63,9 +63,9 @@ public class NoResourceAvailableException extends JobException {
 			return false;
 		}
 
-		if(!(obj instanceof NoResourceAvailableException)){
+		if (!(obj instanceof NoResourceAvailableException)) {
 			return false;
-		}else{
+		} else {
 			return getMessage().equals(((NoResourceAvailableException)obj).getMessage());
 		}
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/web/JobManagerInfoServlet.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/web/JobManagerInfoServlet.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/web/JobManagerInfoServlet.java
index bf2ef70..983738e 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/web/JobManagerInfoServlet.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/jobmanager/web/JobManagerInfoServlet.java
@@ -107,7 +107,7 @@ public class JobManagerInfoServlet extends HttpServlet {
 				if(response instanceof JobFound){
 					ExecutionGraph archivedJob = ((JobFound)response).executionGraph();
 					writeJsonForArchivedJob(resp.getWriter(), archivedJob);
-				}else{
+				} else {
 					LOG.warn("DoGet:job: Could not find job for job ID " + jobId);
 				}
 			}
@@ -118,12 +118,12 @@ public class JobManagerInfoServlet extends HttpServlet {
 				JobResponse response = AkkaUtils.ask(archive,
 						new RequestJob(JobID.fromHexString(jobId)), timeout);
 
-				if(response instanceof JobFound && groupvertexId != null){
+				if (response instanceof JobFound && groupvertexId != null) {
 					ExecutionGraph archivedJob = ((JobFound)response).executionGraph();
 
 					writeJsonForArchivedJobGroupvertex(resp.getWriter(), archivedJob,
 							JobVertexID.fromHexString(groupvertexId));
-				}else{
+				} else {
 					LOG.warn("DoGet:groupvertex: Could not find job for job ID " + jobId);
 				}
 			}
@@ -329,7 +329,7 @@ public class JobManagerInfoServlet extends HttpServlet {
 			AccumulatorResultsResponse response = AkkaUtils.ask(jobmanager,
 					new RequestAccumulatorResults(graph.getJobID()), timeout);
 
-			if(response instanceof AccumulatorResultsFound){
+			if (response instanceof AccumulatorResultsFound) {
 				Map<String, Object> accMap = ((AccumulatorResultsFound)response).asJavaMap();
 
 				wrt.write("\n\"accumulators\": [");
@@ -390,7 +390,7 @@ public class JobManagerInfoServlet extends HttpServlet {
 					wrt.write("}");
 
 				}
-			}else{
+			} else {
 				LOG.warn("Could not find accumulator results for job ID " + graph.getJobID());
 			}
 
@@ -431,9 +431,9 @@ public class JobManagerInfoServlet extends HttpServlet {
 			boolean first = true;
 
 			for(ExecutionGraph g : graphs){
-				if(first){
+				if (first) {
 					first = false;
-				}else{
+				} else {
 					wrt.write(",");
 				}
 
@@ -478,7 +478,7 @@ public class JobManagerInfoServlet extends HttpServlet {
 				wrt.write("]");
 
 				wrt.write("}");
-			}else{
+			} else {
 				wrt.write("\"vertexevents\": [],");
 				wrt.write("\"jobevents\": [");
 				wrt.write("{");

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/java/org/apache/flink/runtime/util/DataInputDeserializer.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/util/DataInputDeserializer.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/util/DataInputDeserializer.java
index 35b6f0d..9915aba 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/util/DataInputDeserializer.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/util/DataInputDeserializer.java
@@ -348,9 +348,9 @@ public class DataInputDeserializer implements DataInputView {
 					".");
 		}
 
-		if(this.position >= this.end){
+		if(this.position >= this.end) {
 			return -1;
-		}else{
+		} else {
 			int toRead = Math.min(this.end-this.position, len);
 			System.arraycopy(this.buffer,this.position,b,off,toRead);
 			this.position += toRead;

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/scala/org/apache/flink/runtime/jobmanager/JobManager.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/scala/org/apache/flink/runtime/jobmanager/JobManager.scala
b/flink-runtime/src/main/scala/org/apache/flink/runtime/jobmanager/JobManager.scala
index 4636999..ba0ba19 100644
--- a/flink-runtime/src/main/scala/org/apache/flink/runtime/jobmanager/JobManager.scala
+++ b/flink-runtime/src/main/scala/org/apache/flink/runtime/jobmanager/JobManager.scala
@@ -371,7 +371,7 @@ Actor with ActorLogMessages with ActorLogging {
    *
    * @param jobGraph representing the Flink job
    * @param listenToEvents true if the sender wants to listen to job status and execution
state
-   *                       change notificatinos. false if not.
+   *                       change notifications. false if not.
    * @param detached true if the job runs in detached mode, meaning that the sender does
not wait
    *                 for the result of the job. false otherwise.
    */

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/scala/org/apache/flink/runtime/minicluster/LocalFlinkMiniCluster.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/scala/org/apache/flink/runtime/minicluster/LocalFlinkMiniCluster.scala
b/flink-runtime/src/main/scala/org/apache/flink/runtime/minicluster/LocalFlinkMiniCluster.scala
index 06d611a..93bde8b 100644
--- a/flink-runtime/src/main/scala/org/apache/flink/runtime/minicluster/LocalFlinkMiniCluster.scala
+++ b/flink-runtime/src/main/scala/org/apache/flink/runtime/minicluster/LocalFlinkMiniCluster.scala
@@ -45,7 +45,7 @@ class LocalFlinkMiniCluster(userConfiguration: Configuration, singleActorSystem:
 
   val jobClientActorSystem = if(singleActorSystem){
     jobManagerActorSystem
-  }else{
+  } else {
     // create an actor system listening on a random port
     AkkaUtils.createDefaultActorSystem()
   }
@@ -88,7 +88,7 @@ class LocalFlinkMiniCluster(userConfiguration: Configuration, singleActorSystem:
 
     val localExecution = if(numTaskManagers == 1){
       true
-    }else{
+    } else {
       false
     }
 

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
b/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
index 1bfe172..e1cedce 100644
--- a/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
+++ b/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
@@ -387,7 +387,7 @@ import scala.collection.JavaConverters._
 
       task.setEnvironment(env)
 
-      // register the task with the network stack and profilers
+      // register the task with the network stack and profiles
       networkEnvironment match {
         case Some(ne) => ne.registerTask(task)
         case None => throw new RuntimeException(

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskManagerTest.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskManagerTest.java
b/flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskManagerTest.java
index d55a67b..28076b5 100644
--- a/flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskManagerTest.java
+++ b/flink-runtime/src/test/java/org/apache/flink/runtime/taskmanager/TaskManagerTest.java
@@ -510,9 +510,9 @@ public class TaskManagerTest {
 
 		@Override
 		public void onReceive(Object message) throws Exception{
-			if(message instanceof JobManagerMessages.UpdateTaskExecutionState){
+			if (message instanceof JobManagerMessages.UpdateTaskExecutionState) {
 				getSender().tell(false, getSelf());
-			}else{
+			} else {
 				super.onReceive(message);
 			}
 		}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-staging/flink-streaming/flink-streaming-core/src/test/java/org/apache/flink/streaming/util/TestStreamEnvironment.java
----------------------------------------------------------------------
diff --git a/flink-staging/flink-streaming/flink-streaming-core/src/test/java/org/apache/flink/streaming/util/TestStreamEnvironment.java
b/flink-staging/flink-streaming/flink-streaming-core/src/test/java/org/apache/flink/streaming/util/TestStreamEnvironment.java
index 95a5d9b..e1b3ac0 100644
--- a/flink-staging/flink-streaming/flink-streaming-core/src/test/java/org/apache/flink/streaming/util/TestStreamEnvironment.java
+++ b/flink-staging/flink-streaming/flink-streaming-core/src/test/java/org/apache/flink/streaming/util/TestStreamEnvironment.java
@@ -56,16 +56,16 @@ public class TestStreamEnvironment extends StreamExecutionEnvironment
{
 
 		ForkableFlinkMiniCluster cluster = new ForkableFlinkMiniCluster(configuration);
 
-		try{
+		try {
 			ActorRef client = cluster.getJobClient();
 			JobClient.submitJobAndWait(jobGraph, false, client, cluster.timeout());
-		}catch(JobExecutionException e){
+		} catch(JobExecutionException e){
 			if(e.getMessage().contains("GraphConversionException")){
 				throw new Exception(CANNOT_EXECUTE_EMPTY_JOB, e);
-			}else{
+			} else {
 				throw e;
 			}
-		}finally{
+		} finally{
 			cluster.stop();
 		}
 	}

http://git-wip-us.apache.org/repos/asf/flink/blob/12d4802c/flink-tests/src/test/java/org/apache/flink/test/recordJobs/relational/TPCHQuery1.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/recordJobs/relational/TPCHQuery1.java
b/flink-tests/src/test/java/org/apache/flink/test/recordJobs/relational/TPCHQuery1.java
index a3c4c74..3444b47 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/recordJobs/relational/TPCHQuery1.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/recordJobs/relational/TPCHQuery1.java
@@ -44,12 +44,11 @@ public class TPCHQuery1 implements Program, ProgramDescription {
 	public Plan getPlan(String... args) throws IllegalArgumentException {
 		
 		
-		if(args.length != 3)
-		{
+		if (args.length != 3) {
 			this.degreeOfParallelism = 1;
 			this.lineItemInputPath = "";
 			this.outputPath = "";
-		}else{
+		} else {
 			this.degreeOfParallelism = Integer.parseInt(args[0]);
 			this.lineItemInputPath = args[1];
 			this.outputPath = args[2];


Mime
View raw message