flink-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From hsapu...@apache.org
Subject flink git commit: Remove semicolon in Scala files. Fix java doc alignment in AggregatorsITCase.java
Date Thu, 29 Jan 2015 22:06:22 GMT
Repository: flink
Updated Branches:
  refs/heads/master c28d96cd6 -> 9849990d9


Remove semicolon in Scala files. Fix java doc alignment in AggregatorsITCase.java


Project: http://git-wip-us.apache.org/repos/asf/flink/repo
Commit: http://git-wip-us.apache.org/repos/asf/flink/commit/9849990d
Tree: http://git-wip-us.apache.org/repos/asf/flink/tree/9849990d
Diff: http://git-wip-us.apache.org/repos/asf/flink/diff/9849990d

Branch: refs/heads/master
Commit: 9849990d98b6a823dfa869c8d8a9b3aa46fa2ff5
Parents: c28d96c
Author: Henry Saputra <henry.saputra@gmail.com>
Authored: Thu Jan 29 13:51:37 2015 -0800
Committer: Henry Saputra <henry.saputra@gmail.com>
Committed: Thu Jan 29 13:51:37 2015 -0800

----------------------------------------------------------------------
 .../api/scala/StreamExecutionEnvironment.scala  |  2 +-
 .../apache/flink/runtime/akka/AkkaUtils.scala   |  2 +-
 .../flink/runtime/taskmanager/TaskManager.scala | 16 ++++++++--------
 .../runtime/jobmanager/JobManagerITCase.scala   |  2 +-
 .../apache/flink/runtime/jobmanager/Tasks.scala | 20 ++++++++++----------
 .../api/scala/typeutils/CaseClassTypeInfo.scala |  4 ++--
 .../aggregators/AggregatorsITCase.java          |  4 ++--
 .../apache/flink/yarn/ApplicationClient.scala   |  2 +-
 .../apache/flink/yarn/ApplicationMaster.scala   |  2 +-
 .../org/apache/flink/yarn/YarnJobManager.scala  |  2 +-
 10 files changed, 28 insertions(+), 28 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
----------------------------------------------------------------------
diff --git a/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
b/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
index 08c7fa3..b2f79ad 100644
--- a/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
+++ b/flink-addons/flink-streaming/flink-streaming-scala/src/main/scala/org/apache/flink/streaming/api/scala/StreamExecutionEnvironment.scala
@@ -210,7 +210,7 @@ class StreamExecutionEnvironment(javaEnv: JavaEnv) {
    * executed.
    *
    */
-  def getExecutionPlan() = javaEnv.getStreamGraph().getStreamingPlanAsJSON();
+  def getExecutionPlan() = javaEnv.getStreamGraph.getStreamingPlanAsJSON
 
 }
 

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-runtime/src/main/scala/org/apache/flink/runtime/akka/AkkaUtils.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/scala/org/apache/flink/runtime/akka/AkkaUtils.scala b/flink-runtime/src/main/scala/org/apache/flink/runtime/akka/AkkaUtils.scala
index 1f2791c..e6e865e 100644
--- a/flink-runtime/src/main/scala/org/apache/flink/runtime/akka/AkkaUtils.scala
+++ b/flink-runtime/src/main/scala/org/apache/flink/runtime/akka/AkkaUtils.scala
@@ -33,7 +33,7 @@ object AkkaUtils {
   val LOG = LoggerFactory.getLogger(AkkaUtils.getClass)
 
   val DEFAULT_TIMEOUT: FiniteDuration =
-    FiniteDuration(ConfigConstants.DEFAULT_AKKA_ASK_TIMEOUT, TimeUnit.SECONDS);
+    FiniteDuration(ConfigConstants.DEFAULT_AKKA_ASK_TIMEOUT, TimeUnit.SECONDS)
 
   val INF_TIMEOUT = 21474835 seconds
 

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
b/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
index 5d2d21d..34c3a6d 100644
--- a/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
+++ b/flink-runtime/src/main/scala/org/apache/flink/runtime/taskmanager/TaskManager.scala
@@ -117,7 +117,7 @@ import scala.collection.JavaConverters._
   }
 
   if (log.isInfoEnabled) {
-    log.info(TaskManager.getMemoryUsageStatsAsString(ManagementFactory.getMemoryMXBean()));
+    log.info(TaskManager.getMemoryUsageStatsAsString(ManagementFactory.getMemoryMXBean()))
   }
 
   var libraryCacheManager: LibraryCacheManager = null
@@ -343,7 +343,7 @@ import scala.collection.JavaConverters._
       if (log.isDebugEnabled) {
         startRegisteringTask = System.currentTimeMillis()
       }
-      libraryCacheManager.registerTask(jobID, executionID, tdd.getRequiredJarFiles());
+      libraryCacheManager.registerTask(jobID, executionID, tdd.getRequiredJarFiles())
 
       if (log.isDebugEnabled) {
         log.debug("Register task {} took {}s", executionID,
@@ -822,12 +822,12 @@ object TaskManager {
           throw new Exception(s"Temporary file directory ${file.getAbsolutePath} is not writable.")
         }
 
-        if (LOG.isInfoEnabled()) {
-          val totalSpaceGb = file.getTotalSpace() >>  30;
-          val usableSpaceGb = file.getUsableSpace() >> 30;
-          val usablePercentage = usableSpaceGb.asInstanceOf[Double] / totalSpaceGb * 100;
+        if (LOG.isInfoEnabled) {
+          val totalSpaceGb = file.getTotalSpace >>  30
+          val usableSpaceGb = file.getUsableSpace >> 30
+          val usablePercentage = usableSpaceGb.asInstanceOf[Double] / totalSpaceGb * 100
 
-          val path = file.getAbsolutePath()
+          val path = file.getAbsolutePath
 
           LOG.info(f"Temporary file directory '$path': total $totalSpaceGb GB," +
             f"usable $usableSpaceGb GB [$usablePercentage%.2f%% usable])")
@@ -838,7 +838,7 @@ object TaskManager {
   }
 
   private def getMemoryUsageStatsAsString(memoryMXBean: MemoryMXBean): String = {
-    val heap = memoryMXBean.getHeapMemoryUsage()
+    val heap = memoryMXBean.getHeapMemoryUsage
     val nonHeap = memoryMXBean.getNonHeapMemoryUsage
 
     val heapUsed = heap.getUsed >> 20

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala
b/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala
index 4ca3d19..36f0f92 100644
--- a/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala
+++ b/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/JobManagerITCase.scala
@@ -301,7 +301,7 @@ WordSpecLike with Matchers with BeforeAndAfterAll {
 
       val jobGraph = new JobGraph("Forwarding Job", sender, forwarder, receiver)
 
-      jobGraph.setScheduleMode(ScheduleMode.ALL);
+      jobGraph.setScheduleMode(ScheduleMode.ALL)
 
       val cluster = TestingUtils.startTestingCluster(num_tasks, 1)
       val jm = cluster.getJobManager

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/Tasks.scala
----------------------------------------------------------------------
diff --git a/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/Tasks.scala
b/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/Tasks.scala
index 334de11..c2fd543 100644
--- a/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/Tasks.scala
+++ b/flink-runtime/src/test/scala/org/apache/flink/runtime/jobmanager/Tasks.scala
@@ -54,7 +54,7 @@ object Tasks {
   class Sender extends AbstractInvokable{
     var writer: RecordWriter[IntegerRecord] = _
     override def registerInputOutput(): Unit = {
-      writer = new RecordWriter[IntegerRecord](getEnvironment().getWriter(0))
+      writer = new RecordWriter[IntegerRecord](getEnvironment.getWriter(0))
     }
 
     override def invoke(): Unit = {
@@ -79,13 +79,13 @@ object Tasks {
     override def invoke(): Unit = {
       try {
         while (true) {
-          val record = reader.next();
+          val record = reader.next()
 
           if (record == null) {
-            return;
+            return
           }
 
-          writer.emit(record);
+          writer.emit(record)
         }
 
         writer.flush()
@@ -156,7 +156,7 @@ object Tasks {
     var reader: RecordReader[IntegerRecord] = _
 
     override def registerInputOutput(): Unit = {
-      val env = getEnvironment()
+      val env = getEnvironment
 
       reader = new RecordReader[IntegerRecord](env.getReader(0), classOf[IntegerRecord])
     }
@@ -171,7 +171,7 @@ object Tasks {
     var reader2: RecordReader[IntegerRecord] = _
 
     override def registerInputOutput(): Unit = {
-      val env = getEnvironment()
+      val env = getEnvironment
 
       reader1 = new RecordReader[IntegerRecord](env.getReader(0), classOf[IntegerRecord])
       reader2 = new RecordReader[IntegerRecord](env.getReader(1), classOf[IntegerRecord])
@@ -189,7 +189,7 @@ object Tasks {
     var reader3: RecordReader[IntegerRecord] = _
 
     override def registerInputOutput(): Unit = {
-      val env = getEnvironment()
+      val env = getEnvironment
 
       reader1 = new RecordReader[IntegerRecord](env.getReader(0), classOf[IntegerRecord])
       reader2 = new RecordReader[IntegerRecord](env.getReader(1), classOf[IntegerRecord])
@@ -219,7 +219,7 @@ object Tasks {
     var writer: RecordWriter[IntegerRecord] = _
 
     override def registerInputOutput(): Unit = {
-      writer = new RecordWriter[IntegerRecord](getEnvironment().getWriter(0))
+      writer = new RecordWriter[IntegerRecord](getEnvironment.getWriter(0))
     }
 
     override def invoke(): Unit = {
@@ -234,7 +234,7 @@ object Tasks {
 
   class ExceptionReceiver extends AbstractInvokable {
     override def registerInputOutput(): Unit = {
-      new RecordReader[IntegerRecord](getEnvironment().getReader(0), classOf[IntegerRecord])
+      new RecordReader[IntegerRecord](getEnvironment.getReader(0), classOf[IntegerRecord])
     }
 
     override def invoke(): Unit = {
@@ -258,7 +258,7 @@ object Tasks {
     }
 
     override def registerInputOutput(): Unit = {
-      new RecordWriter[IntegerRecord](getEnvironment().getWriter(0))
+      new RecordWriter[IntegerRecord](getEnvironment.getWriter(0))
     }
 
     override def invoke(): Unit = {

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala
----------------------------------------------------------------------
diff --git a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala
b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala
index e2c3527..b407332 100644
--- a/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala
+++ b/flink-scala/src/main/scala/org/apache/flink/api/scala/typeutils/CaseClassTypeInfo.scala
@@ -113,7 +113,7 @@ abstract class CaseClassTypeInfo[T <: Product](
       field = matcher.group(1)
     }
 
-    val intFieldMatcher = PATTERN_INT_FIELD.matcher(field);
+    val intFieldMatcher = PATTERN_INT_FIELD.matcher(field)
     if(intFieldMatcher.matches()) {
       // convert 0-indexed integer field into 1-indexed name field
       field = "_" + (Integer.valueOf(field) + 1)
@@ -181,7 +181,7 @@ abstract class CaseClassTypeInfo[T <: Product](
     var field = matcher.group(1)
     val tail = matcher.group(3)
 
-    val intFieldMatcher = PATTERN_INT_FIELD.matcher(field);
+    val intFieldMatcher = PATTERN_INT_FIELD.matcher(field)
     if(intFieldMatcher.matches()) {
       // convert 0-indexed integer field into 1-indexed name field
       field = "_" + (Integer.valueOf(field) + 1)

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-tests/src/test/java/org/apache/flink/test/iterative/aggregators/AggregatorsITCase.java
----------------------------------------------------------------------
diff --git a/flink-tests/src/test/java/org/apache/flink/test/iterative/aggregators/AggregatorsITCase.java
b/flink-tests/src/test/java/org/apache/flink/test/iterative/aggregators/AggregatorsITCase.java
index aae7168..63cac17 100644
--- a/flink-tests/src/test/java/org/apache/flink/test/iterative/aggregators/AggregatorsITCase.java
+++ b/flink-tests/src/test/java/org/apache/flink/test/iterative/aggregators/AggregatorsITCase.java
@@ -136,8 +136,8 @@ public class AggregatorsITCase extends MultipleProgramsTestBase {
 	@Test
 	public void testConvergenceCriterionWithParameterForIterate() throws Exception {
 		/*
-				 * Test convergence criterion with parameter for iterate
-				 */
+		 * Test convergence criterion with parameter for iterate
+		 */
 
 		final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
 		env.setDegreeOfParallelism(DOP);

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
index 22f4c02..73349b2 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationClient.scala
@@ -82,7 +82,7 @@ class ApplicationClient
             WAIT_FOR_YARN_INTERVAL, yarnJobManager.get, PollYarnClusterStatus))
         }
        case None => throw new RuntimeException("Registration at JobManager/ApplicationMaster " +
-          "failed. Job Manager RPC connection has not properly been initialized");
+          "failed. Job Manager RPC connection has not properly been initialized")
       }
     }
     case msg: StopYarnSession => {

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationMaster.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationMaster.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationMaster.scala
index 7c72ef4..e161124 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationMaster.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/ApplicationMaster.scala
@@ -179,7 +179,7 @@ object ApplicationMaster {
     for(property <- dynamicProperties.asScala){
       configuration.setString(property.f0, property.f1)
     }
-    configuration.setInteger(ConfigConstants.JOB_MANAGER_WEB_PORT_KEY, jobManagerWebPort);
+    configuration.setInteger(ConfigConstants.JOB_MANAGER_WEB_PORT_KEY, jobManagerWebPort)
 
     // set port to 0 to let Akka automatically determine the port.
     implicit val jobManagerSystem = YarnUtils.createActorSystem(hostname, port = 0, configuration)

http://git-wip-us.apache.org/repos/asf/flink/blob/9849990d/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
----------------------------------------------------------------------
diff --git a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
index 7e9570e..7e1ce6f 100644
--- a/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
+++ b/flink-yarn/src/main/scala/org/apache/flink/yarn/YarnJobManager.scala
@@ -119,7 +119,7 @@ trait YarnJobManager extends ActorLogMessages {
       val shipListString = env.get(FlinkYarnClient.ENV_CLIENT_SHIP_FILES)
       val yarnClientUsername = env.get(FlinkYarnClient.ENV_CLIENT_USERNAME)
 
-      val jobManagerWebPort = that.webServer.getServer.getConnectors()(0).getLocalPort;
+      val jobManagerWebPort = that.webServer.getServer.getConnectors()(0).getLocalPort
 
       val rm = AMRMClient.createAMRMClient[ContainerRequest]()
       rm.init(conf)


Mime
View raw message