flink-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From fhue...@apache.org
Subject flink git commit: [Typo] Typo fixes in code comments.
Date Sun, 01 Feb 2015 20:49:42 GMT
Repository: flink
Updated Branches:
  refs/heads/master 389683125 -> 31641a68e


[Typo] Typo fixes in code comments.

This closes #352


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/31641a68
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/31641a68
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/31641a68

Branch: refs/heads/master
Commit: 31641a68ed0718ac4aa04156851f9f66b92dc7f3
Parents: 3896831
Author: mborgmann <moritzborgmann@gmail.com>
Authored: Fri Jan 30 11:52:43 2015 +0100
Committer: Fabian Hueske <fhueske@apache.org>
Committed: Sun Feb 1 21:47:14 2015 +0100

----------------------------------------------------------------------
 .../org/apache/flink/streaming/api/datastream/DataStream.java    | 4 ++--
 .../streaming/api/invokable/operator/GroupedWindowInvokable.java | 2 +-
 .../flink/streaming/api/windowing/extractor/FieldsFromTuple.java | 2 +-
 .../streaming/api/windowing/policy/ActiveTriggerPolicy.java      | 2 +-
 .../src/main/java/org/apache/flink/streaming/state/MapState.java | 2 +-
 .../flink/streaming/api/scala/StreamExecutionEnvironment.scala   | 2 +-
 .../src/main/java/org/apache/flink/compiler/PactCompiler.java    | 4 ++--
 .../src/main/java/org/apache/flink/compiler/plan/PlanNode.java   | 4 ++--
 .../org/apache/flink/compiler/plan/WorksetIterationPlanNode.java | 2 +-
 .../apache/flink/compiler/postpass/GenericFlatTypePostPass.java  | 2 +-
 .../java/org/apache/flink/api/common/io/FileInputFormat.java     | 2 +-
 .../src/main/java/org/apache/flink/types/parser/FieldParser.java | 2 +-
 .../java/org/apache/flink/api/java/ExecutionEnvironment.java     | 2 +-
 .../java/org/apache/flink/api/java/record/io/CsvInputFormat.java | 2 +-
 .../java/org/apache/flink/runtime/profiling/ProfilingUtils.java  | 2 +-
 .../apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java  | 2 +-
 16 files changed, 19 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
index 8e87b27..aed4496 100644
--- a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
+++ b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/datastream/DataStream.java
@@ -411,7 +411,7 @@ public class DataStream<OUT> {
 	 * the iteration head.
 	 * <p>
 	 * By default a DataStream with iteration will never terminate, but the user
-	 * can use the the maxWaitTime parameter to set a max waiting time for the
+	 * can use the maxWaitTime parameter to set a max waiting time for the
 	 * iteration head. If no data received in the set time, the stream
 	 * terminates.
 	 * 
@@ -436,7 +436,7 @@ public class DataStream<OUT> {
 	 * the iteration head.
 	 * <p>
 	 * By default a DataStream with iteration will never terminate, but the user
-	 * can use the the maxWaitTime parameter to set a max waiting time for the
+	 * can use the maxWaitTime parameter to set a max waiting time for the
 	 * iteration head. If no data received in the set time, the stream
 	 * terminates.
 	 * 

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/invokable/operator/GroupedWindowInvokable.java
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/invokable/operator/GroupedWindowInvokable.java
b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/invokable/operator/GroupedWindowInvokable.java
index a46fa96..997463c 100644
--- a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/invokable/operator/GroupedWindowInvokable.java
+++ b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/invokable/operator/GroupedWindowInvokable.java
@@ -475,7 +475,7 @@ public class GroupedWindowInvokable<IN, OUT> extends StreamInvokable<IN,
OUT> {
 	}
 
 	/**
-	 * This callback class allows to handle the the callbacks done by threads
+	 * This callback class allows to handle the callbacks done by threads
 	 * defined in active trigger policies
 	 * 
 	 * @see ActiveTriggerPolicy#createActiveTriggerRunnable(ActiveTriggerCallback)

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/extractor/FieldsFromTuple.java
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/extractor/FieldsFromTuple.java
b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/extractor/FieldsFromTuple.java
index afd0421..1bfc461 100644
--- a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/extractor/FieldsFromTuple.java
+++ b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/extractor/FieldsFromTuple.java
@@ -20,7 +20,7 @@ package org.apache.flink.streaming.api.windowing.extractor;
 import org.apache.flink.api.java.tuple.Tuple;
 
 /**
- * Extracts one or more fields of the the type Double from a tuple and puts them
+ * Extracts one or more fields of the type Double from a tuple and puts them
  * into a new double[]
  */
 public class FieldsFromTuple implements Extractor<Tuple, double[]> {

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/policy/ActiveTriggerPolicy.java
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/policy/ActiveTriggerPolicy.java
b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/policy/ActiveTriggerPolicy.java
index 414250c..abe5298 100644
--- a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/policy/ActiveTriggerPolicy.java
+++ b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/api/windowing/policy/ActiveTriggerPolicy.java
@@ -26,7 +26,7 @@ import org.apache.flink.streaming.api.windowing.helper.Timestamp;
  * 1) Whenever an element arrives at the invokable, the
  * {@link ActiveTriggerPolicy#preNotifyTrigger(Object)} method gets called
 * first. It can return zero or more fake data points which will be added
- * before the the currently arrived real element gets processed. This allows to
+ * before the currently arrived real element gets processed. This allows to
 * handle empty windows in time based windowing with a user defined
  * {@link Timestamp}. Triggers are not called on fake datapoint. A fake
  * datapoint is always considered as triggered.

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/state/MapState.java
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/state/MapState.java
b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/state/MapState.java
index 74cf57e..85aec52 100644
--- a/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/state/MapState.java
+++ b/flink-addons/flink-streaming/flink-streaming-core/src/main/java/org/apache/flink/streaming/state/MapState.java
@@ -33,7 +33,7 @@ import org.apache.flink.streaming.state.checkpoint.StateCheckpoint;
  * </br> The MapState also allows for incremental (data efficient) checkpointing
  * of the state. The entries in the map should only be modified by using the
  * dedicated methods: {@link #put(Object, Object)},{@link #remove(Object)},
- * {@link #putAll(Map)} and {@link #clear}. Directly modifying the the entryset
+ * {@link #putAll(Map)} and {@link #clear}. Directly modifying the entryset
  * will cause errors when checkpointing.
  *
  * @param <K>

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
b/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
index b2f79ad..394673c 100644
--- a/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
+++ b/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
@@ -85,7 +85,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) {
    * Creates a DataStream that contains the contents of file created while
    * system watches the given path. The file will be read with the system's
    * default character set. The user can check the monitoring interval in milliseconds,
-   * and the the way file modifications are handled. By default it checks for only new files
+   * and the way file modifications are handled. By default it checks for only new files
    * every 100 milliseconds.
    *
    */

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-compiler/src/main/java/org/apache/flink/compiler/PactCompiler.java
----------------------------------------------------------------------
diff --git a/flink-compiler/src/main/java/org/apache/flink/compiler/PactCompiler.java b/flink-compiler/src/main/java/org/apache/flink/compiler/PactCompiler.java
index 5126135..6d6bcc7 100644
--- a/flink-compiler/src/main/java/org/apache/flink/compiler/PactCompiler.java
+++ b/flink-compiler/src/main/java/org/apache/flink/compiler/PactCompiler.java
@@ -227,7 +227,7 @@ public class PactCompiler {
 	
 	/**
 	 * Value for the local strategy compiler hint that enforces a <b>sort merge based</b>
local strategy.
-	 * The the first input is sorted, the second input is assumed to be sorted. After sorting
both inputs are merged. 
+	 * The first input is sorted, the second input is assumed to be sorted. After sorting both
inputs are merged.
 	 * For example, a <i>Match</i> or <i>CoGroup</i> operator will use
a sort-merge strategy to find pairs 
 	 * of matching keys.
 	 * 
@@ -237,7 +237,7 @@ public class PactCompiler {
 	
 	/**
 	 * Value for the local strategy compiler hint that enforces a <b>sort merge based</b>
local strategy.
-	 * The the second input is sorted, the first input is assumed to be sorted. After sorting
both inputs are merged. 
+	 * The second input is sorted, the first input is assumed to be sorted. After sorting both
inputs are merged.
 	 * For example, a <i>Match</i> or <i>CoGroup</i> operator will use
a sort-merge strategy to find pairs 
 	 * of matching keys.
 	 * 

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
----------------------------------------------------------------------
diff --git a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
index ef69860..f2375fd 100644
--- a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
+++ b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/PlanNode.java
@@ -101,7 +101,7 @@ public abstract class PlanNode implements Visitable<PlanNode>, DumpableNode<Plan
 	}
 	
 	protected void mergeBranchPlanMaps(Map<OptimizerNode, PlanNode> branchPlan1, Map<OptimizerNode,
PlanNode> branchPlan2) {
-		// merge the branchPlan maps according the the template's uncloseBranchesStack
+		// merge the branchPlan maps according the template's uncloseBranchesStack
 		if (this.template.hasUnclosedBranches()) {
 			if (this.branchPlan == null) {
 				this.branchPlan = new HashMap<OptimizerNode, PlanNode>(8);
@@ -238,7 +238,7 @@ public abstract class PlanNode implements Visitable<PlanNode>, DumpableNode<Plan
 	}
 
 	/**
-	 * Gets the cumulative costs of this node. The cumulative costs are the the sum of the costs
+	 * Gets the cumulative costs of this node. The cumulative costs are the sum of the costs
 	 * of this node and of all nodes in the subtree below this node.
 	 * 
 	 * @return The cumulative costs, or null, if not yet set.

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-compiler/src/main/java/org/apache/flink/compiler/plan/WorksetIterationPlanNode.java
----------------------------------------------------------------------
diff --git a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/WorksetIterationPlanNode.java
b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/WorksetIterationPlanNode.java
index 47e9b69..6565c97 100644
--- a/flink-compiler/src/main/java/org/apache/flink/compiler/plan/WorksetIterationPlanNode.java
+++ b/flink-compiler/src/main/java/org/apache/flink/compiler/plan/WorksetIterationPlanNode.java
@@ -207,7 +207,7 @@ public class WorksetIterationPlanNode extends DualInputPlanNode implements
Itera
 		Map<OptimizerNode, PlanNode> branchPlan1 = input1.getSource().branchPlan;
 		Map<OptimizerNode, PlanNode> branchPlan2 = input2.getSource().branchPlan;
 
-		// merge the branchPlan maps according the the template's uncloseBranchesStack
+		// merge the branchPlan maps according the template's uncloseBranchesStack
 		if (this.template.hasUnclosedBranches()) {
 			if (this.branchPlan == null) {
 				this.branchPlan = new HashMap<OptimizerNode, PlanNode>(8);

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-compiler/src/main/java/org/apache/flink/compiler/postpass/GenericFlatTypePostPass.java
----------------------------------------------------------------------
diff --git a/flink-compiler/src/main/java/org/apache/flink/compiler/postpass/GenericFlatTypePostPass.java
b/flink-compiler/src/main/java/org/apache/flink/compiler/postpass/GenericFlatTypePostPass.java
index 5da215f..cfa201f 100644
--- a/flink-compiler/src/main/java/org/apache/flink/compiler/postpass/GenericFlatTypePostPass.java
+++ b/flink-compiler/src/main/java/org/apache/flink/compiler/postpass/GenericFlatTypePostPass.java
@@ -501,7 +501,7 @@ public abstract class GenericFlatTypePostPass<X, T extends AbstractSchema<X>>
im
 			}
 		}
 		
-		// propagate the the channel's source model
+		// propagate the channel's source model
 		traverse(channel.getSource(), schema, createUtilities);
 	}
 	

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java
b/flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java
index 71f09b0..ed92eed 100644
--- a/flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java
+++ b/flink-core/src/main/java/org/apache/flink/api/common/io/FileInputFormat.java
@@ -132,7 +132,7 @@ public abstract class FileInputFormat<OT> implements InputFormat<OT,
FileInputSp
 	protected Path filePath;
 	
 	/**
-	 * The the minimal split size, set by the configure() method.
+	 * The minimal split size, set by the configure() method.
 	 */
 	protected long minSplitSize = 0; 
 	

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-core/src/main/java/org/apache/flink/types/parser/FieldParser.java
----------------------------------------------------------------------
diff --git a/flink-core/src/main/java/org/apache/flink/types/parser/FieldParser.java b/flink-core/src/main/java/org/apache/flink/types/parser/FieldParser.java
index 322f48e..28f9a7a 100644
--- a/flink-core/src/main/java/org/apache/flink/types/parser/FieldParser.java
+++ b/flink-core/src/main/java/org/apache/flink/types/parser/FieldParser.java
@@ -88,7 +88,7 @@ public abstract class FieldParser<T> {
 	/**
 	 * Gets the parsed field. This method returns the value parsed by the last successful invocation
of
 	 * {@link #parseField(byte[], int, int, byte[], Object)}. If objects are mutable and reused,
it will return
-	 * the object instance that was passed the the parse function.
+	 * the object instance that was passed the parse function.
 	 * 
 	 * @return The latest parsed field.
 	 */

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
index 77fed97..35d8c44 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/ExecutionEnvironment.java
@@ -408,7 +408,7 @@ public abstract class ExecutionEnvironment {
 	 * <p>
 	 * Since all data sets need specific information about their types, this method needs to
determine
 	 * the type of the data produced by the input format. It will attempt to determine the data
type
-	 * by reflection, unless the the input format implements the {@link ResultTypeQueryable}
interface.
+	 * by reflection, unless the input format implements the {@link ResultTypeQueryable} interface.
 	 * In the latter case, this method will invoke the {@link ResultTypeQueryable#getProducedType()}
 	 * method to determine data type produced by the input format.
 	 * 

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-java/src/main/java/org/apache/flink/api/java/record/io/CsvInputFormat.java
----------------------------------------------------------------------
diff --git a/flink-java/src/main/java/org/apache/flink/api/java/record/io/CsvInputFormat.java
b/flink-java/src/main/java/org/apache/flink/api/java/record/io/CsvInputFormat.java
index 5f83a19..4e92874 100644
--- a/flink-java/src/main/java/org/apache/flink/api/java/record/io/CsvInputFormat.java
+++ b/flink-java/src/main/java/org/apache/flink/api/java/record/io/CsvInputFormat.java
@@ -314,7 +314,7 @@ public class CsvInputFormat extends GenericCsvInputFormat<Record>
{
 		/**
 		 * Creates a new builder for the given configuration.
 		 *
-		 * @param contract The contract from which the the compiler hints are used. 
+		 * @param contract The contract from which the compiler hints are used.
 		 *                 If contract is null, new compiler hints are generated.  
 		 * @param config The configuration into which the parameters will be written.
 		 */

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-runtime/src/main/java/org/apache/flink/runtime/profiling/ProfilingUtils.java
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/profiling/ProfilingUtils.java
b/flink-runtime/src/main/java/org/apache/flink/runtime/profiling/ProfilingUtils.java
index d05522a..95211be 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/profiling/ProfilingUtils.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/profiling/ProfilingUtils.java
@@ -46,7 +46,7 @@ public class ProfilingUtils {
 	public static final String ENABLE_PROFILING_KEY = "jobmanager.profiling.enable";
 
 	/**
-	 * The class name of the the job manager's profiling component to load if profiling is enabled.
+	 * The class name of the job manager's profiling component to load if profiling is enabled.
 	 */
 	public static final String JOBMANAGER_CLASSNAME_KEY = "jobmanager.profiling.classname";
 

http://git-wip-us.apache.org/repos/asf/flink/blob/31641a68/flink-tests/src/test/java/org/apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java
b/flink-tests/src/test/java/org/apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java
index 0daab92..c5dd8ec 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/recordJobs/kmeans/udfs/PointInFormat.java
@@ -29,7 +29,7 @@ import org.apache.flink.types.Record;
  * Generates records with an id and a CoordVector.
  * The input format is line-based, i.e. one record is read from one line
  * which is terminated by '\n'. Within a line the first '|' character separates
- * the id from the the CoordVector. The vector consists of a vector of decimals. 
+ * the id from the CoordVector. The vector consists of a vector of decimals.
  * The decimals are separated by '|' as well. The id is the id of a data point or
  * cluster center and the CoordVector the corresponding position (coordinate
  * vector) of the data point or cluster center. Example line:


Mime
View raw message