flink-issues mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Commented] (FLINK-2613) Print usage information for Scala Shell
Date Thu, 01 Oct 2015 10:26:26 GMT

    [ https://issues.apache.org/jira/browse/FLINK-2613?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14939641#comment-14939641
] 

ASF GitHub Bot commented on FLINK-2613:
---------------------------------------

Github user nikste commented on a diff in the pull request:

    https://github.com/apache/flink/pull/1106#discussion_r40898989
  
    --- Diff: flink-staging/flink-scala-shell/src/main/scala/org/apache/flink/api/scala/FlinkShell.scala
---
    @@ -0,0 +1,148 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + *     http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.flink.api.scala
    +
    +
    +import java.io.{StringWriter, BufferedReader}
    +
    +import scala.tools.nsc.Settings
    +
    +import org.apache.flink.configuration.Configuration
    +import org.apache.flink.runtime.minicluster.LocalFlinkMiniCluster
    +
    +import scala.tools.nsc.interpreter._
    +
    +
    +object FlinkShell {
    +
    +  val LOCAL = 0;
    +  val REMOTE = 1;
    +  val UNDEFINED = -1;
    +
    +  var bufferedReader: BufferedReader = null;
    +
    +  def main(args: Array[String]) {
    +
    +    // scopt, command line arguments
    +    case class Config(
    +        port: Int = -1,
    +        host: String = "none",
    +        externalJars: Option[Array[String]] = None,
    +        flinkShellExecutionMode : Int = UNDEFINED)
    +
    +    val parser = new scopt.OptionParser[Config]("start-scala-shell.sh") {
    +      head ("Flink Scala Shell")
    +
    +      cmd("local") action {
    +        (_,c) => c.copy( host = "none", port = -1, flinkShellExecutionMode = LOCAL)
    +      } text("starts Flink scala shell with a local Flink cluster\n") children(
    +        opt[(String)] ("addclasspath") abbr("a") valueName("<path/to/jar>") action
{
    +          case (x,c) =>
    +            val xArray = x.split(":")
    +            c.copy(externalJars = Option(xArray))
    +          } text("specifies additional jars to be used in Flink\n")
    +        )
    +
    +      cmd("remote") action { (_, c) =>
    +        c.copy(flinkShellExecutionMode = REMOTE)
    +      } text("starts Flink scala shell connecting to a remote cluster\n") children(
    +        arg[String]("<host>") action { (h, c) =>
    +          c.copy(host = h) }
    +          text("remote host name as string"),
    +        arg[Int]("<port>") action { (p, c) =>
    +          c.copy(port = p) }
    +          text("remote port as integer\n"),
    +        opt[(String)] ("addclasspath") abbr("a")  valueName("<path/to/jar>") action
{
    +          case (x,c) =>
    +            val xArray = x.split(":")
    +            c.copy(externalJars = Option(xArray))
    +          } text("specifies additional jars to be used in Flink")
    +
    +      )
    +      help("help") abbr("h") text("prints this usage text\n")
    +    }
    +
    +
    +    // parse arguments
    +    parser.parse (args, Config () ) match {
    +      case Some(config) =>
    +        startShell(config.host,
    +          config.port,
    +          config.flinkShellExecutionMode,
    +          config.externalJars)
    +
    +      case _ => println("Could not parse program arguments")
    +    }
    +  }
    +
    +
    +  def startShell(
    +      userHost : String, 
    +      userPort : Int,
    +      executionMode : Int,
    +      externalJars : Option[Array[String]] = None): Unit ={
    +    
    +    println("Starting Flink Shell:")
    +
    +    var cluster: LocalFlinkMiniCluster = null
    +
    +    // either port or userhost not specified by user, create new minicluster
    +    val (host,port) = if (executionMode == LOCAL) {
    +      cluster = new LocalFlinkMiniCluster(new Configuration, false)
    --- End diff --
    
    I was not aware of that. I would suggest that it should start up with slots for all cores
available.
    What do you think? And how would this be done most easily?



> Print usage information for Scala Shell
> ---------------------------------------
>
>                 Key: FLINK-2613
>                 URL: https://issues.apache.org/jira/browse/FLINK-2613
>             Project: Flink
>          Issue Type: Improvement
>          Components: Scala Shell
>    Affects Versions: 0.10
>            Reporter: Maximilian Michels
>            Assignee: Nikolaas Steenbergen
>            Priority: Minor
>              Labels: starter
>             Fix For: 0.10
>
>
> The Scala Shell startup script starts a {{FlinkMiniCluster}} by default if invoked with
no arguments.
> We should add a {{--help}} or {{-h}} option to make it easier for people to find out
how to configure remote execution. Alternatively, we could print a notice on the local startup
explaining how to start the shell in remote mode.



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

Mime
View raw message