spark-reviews mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From vanzin <...@git.apache.org>
Subject [GitHub] spark pull request: [SPARK-5124][Core] A standard RPC interface an...
Date Tue, 10 Mar 2015 18:07:08 GMT
Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4588#discussion_r26148526
  
    --- Diff: core/src/main/scala/org/apache/spark/rpc/RpcEnv.scala ---
    @@ -0,0 +1,373 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one or more
    + * contributor license agreements.  See the NOTICE file distributed with
    + * this work for additional information regarding copyright ownership.
    + * The ASF licenses this file to You under the Apache License, Version 2.0
    + * (the "License"); you may not use this file except in compliance with
    + * the License.  You may obtain a copy of the License at
    + *
    + *    http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +
    +package org.apache.spark.rpc
    +
    +import java.net.URI
    +
    +import scala.concurrent.Future
    +import scala.concurrent.duration.FiniteDuration
    +import scala.reflect.ClassTag
    +
    +import org.apache.spark.{SparkException, SecurityManager, SparkConf}
    +import org.apache.spark.util.Utils
    +
    +/**
    + * An RPC environment. [[RpcEndpoint]]s need to register themselves with a name to [[RpcEnv]]
to
    + * receive messages. Then [[RpcEnv]] will process messages sent from [[RpcEndpointRef]]
or remote
    + * nodes, and deliver them to corresponding [[RpcEndpoint]]s.
    + *
    + * [[RpcEnv]] also provides some methods to retrieve [[RpcEndpointRef]]s given a name or
URI.
    + */
    +private[spark] trait RpcEnv {
    +
    +  /**
    +   * Return RpcEndpointRef of the registered [[RpcEndpoint]]. Will be used to implement
    +   * [[RpcEndpoint.self]].
    +   */
    +  private[rpc] def endpointRef(endpoint: RpcEndpoint): RpcEndpointRef
    +
    +  /**
    +   * Return the address that [[RpcEnv]] is listening to.
    +   */
    +  def address: RpcAddress
    +
    +  /**
    +   * Register a [[RpcEndpoint]] with a name and return its [[RpcEndpointRef]]. [[RpcEnv]]
does not
    +   * guarantee thread-safety.
    +   */
    +  def setupEndpoint(name: String, endpoint: RpcEndpoint): RpcEndpointRef
    +
    +  /**
    +   * Register a [[RpcEndpoint]] with a name and return its [[RpcEndpointRef]]. [[RpcEnv]]
should
    +   * make sure that messages are sent to the [[RpcEndpoint]] in a thread-safe manner.
    +   *
    +   * Thread-safety means processing of one message happens before processing of the next
message by
    +   * the same [[RpcEndpoint]]. In other words, changes to internal fields of a [[RpcEndpoint]]
    +   * are visible when processing the next message, and fields in the [[RpcEndpoint]]
need not be
    +   * volatile or equivalent.
    +   *
    +   * However, there is no guarantee that the same thread will be executing the same [[RpcEndpoint]]
    +   * for different messages.
    +   */
    +  def setupThreadSafeEndpoint(name: String, endpoint: RpcEndpoint): RpcEndpointRef
    +
    +  /**
    +   * Retrieve the [[RpcEndpointRef]] represented by `url`.
    +   */
    +  def setupEndpointRefByUrl(url: String): RpcEndpointRef
    +
    +  /**
    +   * Retrieve the [[RpcEndpointRef]] represented by `systemName`, `address` and `endpointName`
    +   */
    +  def setupEndpointRef(
    +      systemName: String, address: RpcAddress, endpointName: String): RpcEndpointRef
    +
    +  /**
    +   * Stop [[RpcEndpoint]] specified by `endpoint`.
    +   */
    +  def stop(endpoint: RpcEndpointRef): Unit
    +
    +  /**
    +   * Shutdown this [[RpcEnv]] asynchronously. If you need to make sure [[RpcEnv]] exits successfully,
    +   * call [[awaitTermination()]] straight after [[shutdown()]].
    +   */
    +  def shutdown(): Unit
    +
    +  /**
    +   * Wait until [[RpcEnv]] exits.
    +   *
    +   * TODO do we need a timeout parameter?
    +   */
    +  def awaitTermination(): Unit
    +
    +  /**
    +   * Create a URI used to create a [[RpcEndpointRef]]. Use this one to create the URI
instead of
    +   * creating it manually because different [[RpcEnv]] may have different formats.
    +   */
    +  def uriOf(systemName: String, address: RpcAddress, endpointName: String): String
    +}
    +
    +private[spark] case class RpcEnvConfig(
    +    conf: SparkConf,
    +    name: String,
    +    host: String,
    +    port: Int,
    +    securityManager: SecurityManager)
    +
    +/**
    + * A RpcEnv implementation must have a companion object with an
    + * `apply(config: RpcEnvConfig): RpcEnv` method so that it can be created via Reflection.
    + *
    + * {{{
    + * object MyCustomRpcEnv {
    + *   def apply(config: RpcEnvConfig): RpcEnv = {
    + *     ...
    + *   }
    + * }
    + * }}}
    + */
    +private[spark] object RpcEnv {
    +
    +  private def getRpcEnvCompanion(conf: SparkConf): AnyRef = {
    +    // Add more RpcEnv implementations here
    +    val rpcEnvNames = Map("akka" -> "org.apache.spark.rpc.akka.AkkaRpcEnv")
    +    val rpcEnvName = conf.get("spark.rpc", "akka")
    +    val rpcEnvClassName = rpcEnvNames.getOrElse(rpcEnvName.toLowerCase, rpcEnvName)
    +    val companion = Class.forName(
    +      rpcEnvClassName + "$", true, Utils.getContextOrSparkClassLoader).getField("MODULE$").get(null)
    --- End diff --
    
    A different, more verbose approach would be to have an `RpcEnvFactory` interface and require
the implementations to have an empty constructor. But that would not be much cleaner than
the above, just a little more type-safe.


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org


Mime
View raw message