From: rxin@apache.org
To: commits@spark.apache.org
Subject: spark git commit: [SPARK-15073][SQL] Hide SparkSession constructor from the public
Date: Tue, 3 May 2016 20:48:14 +0000 (UTC)

Repository: spark
Updated Branches:
  refs/heads/branch-2.0 b67668bc4 -> ca9917160


[SPARK-15073][SQL] Hide SparkSession constructor from the public

## What changes were proposed in this pull request?

Users should use the builder pattern instead.

## How was this patch tested?

Jenkins.

Author: Andrew Or

Closes #12873 from andrewor14/spark-session-constructor.
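For callers, the migration this asks for is mechanical: go through the builder instead of the constructor. Below is a minimal sketch of the new entry point, assuming a standalone local app; the master, app name, and config key are illustrative placeholders, not part of this commit.

import org.apache.spark.sql.SparkSession

object BuilderSketch {
  def main(args: Array[String]): Unit = {
    // getOrCreate() returns the running session if one exists; otherwise it
    // builds a new one. The constructor itself is now private[sql].
    val spark = SparkSession.builder
      .master("local")                                   // placeholder master
      .appName("BuilderSketch")                          // placeholder name
      .config("spark.some.config.option", "some-value")  // placeholder config
      .getOrCreate()

    println(spark.sparkContext.appName)  // confirm the session is live
    spark.stop()
  }
}

Because getOrCreate() hands back an existing session when one is already running, callers no longer need to thread a SparkContext around by hand, which is what the REPL changes below rely on.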
(cherry picked from commit 588cac414a9cf1e0f40a82cc6a78f77e26825f29)
Signed-off-by: Reynold Xin


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ca991716
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ca991716
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ca991716

Branch: refs/heads/branch-2.0
Commit: ca9917160849597a839a8b6b39321435734d936b
Parents: b67668b
Author: Andrew Or
Authored: Tue May 3 13:47:58 2016 -0700
Committer: Reynold Xin
Committed: Tue May 3 13:48:12 2016 -0700

----------------------------------------------------------------------
 .../org/apache/spark/examples/sql/RDDRelation.scala   |  9 +++------
 .../main/scala/org/apache/spark/repl/SparkILoop.scala |  4 ++--
 .../src/main/scala/org/apache/spark/repl/Main.scala   |  4 ++--
 .../scala/org/apache/spark/sql/SparkSession.scala     | 14 ++++++++++++--
 4 files changed, 19 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/ca991716/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
----------------------------------------------------------------------
diff --git a/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala b/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
index 8ce4427..b4118b1 100644
--- a/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
+++ b/examples/src/main/scala/org/apache/spark/examples/sql/RDDRelation.scala
@@ -18,7 +18,6 @@
 // scalastyle:off println
 package org.apache.spark.examples.sql

-import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.sql.{SaveMode, SparkSession}

 // One method for defining the schema of an RDD is to make a case class with the desired column
@@ -27,14 +26,12 @@ case class Record(key: Int, value: String)

 object RDDRelation {
   def main(args: Array[String]) {
-    val sparkConf = new SparkConf().setAppName("RDDRelation")
-    val sc = new SparkContext(sparkConf)
-    val spark = new SparkSession(sc)
+    val spark = SparkSession.builder.appName("RDDRelation").getOrCreate()

     // Importing the SparkSession gives access to all the SQL functions and implicit conversions.
     import spark.implicits._

-    val df = sc.parallelize((1 to 100).map(i => Record(i, s"val_$i"))).toDF()
+    val df = spark.createDataFrame((1 to 100).map(i => Record(i, s"val_$i")))

     // Any RDD containing case classes can be registered as a table. The schema of the table is
     // automatically inferred using scala reflection.
     df.registerTempTable("records")
@@ -70,7 +67,7 @@ object RDDRelation {
     parquetFile.registerTempTable("parquetFile")
     spark.sql("SELECT * FROM parquetFile").collect().foreach(println)

-    sc.stop()
+    spark.stop()
   }
 }
 // scalastyle:on println


http://git-wip-us.apache.org/repos/asf/spark/blob/ca991716/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 6a811ad..c4f6450 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -1030,10 +1030,10 @@ class SparkILoop(

   def createSparkSession(): SparkSession = {
     if (SparkSession.hiveClassesArePresent) {
       logInfo("Creating Spark session with Hive support")
-      SparkSession.withHiveSupport(sparkContext)
+      SparkSession.builder.enableHiveSupport().getOrCreate()
     } else {
       logInfo("Creating Spark session")
-      new SparkSession(sparkContext)
+      SparkSession.builder.getOrCreate()
     }
   }


http://git-wip-us.apache.org/repos/asf/spark/blob/ca991716/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
index 8e381ff..a171759 100644
--- a/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
+++ b/repl/scala-2.11/src/main/scala/org/apache/spark/repl/Main.scala
@@ -94,10 +94,10 @@ object Main extends Logging {

   def createSparkSession(): SparkSession = {
     if (SparkSession.hiveClassesArePresent) {
-      sparkSession = SparkSession.withHiveSupport(sparkContext)
+      sparkSession = SparkSession.builder.enableHiveSupport().getOrCreate()
       logInfo("Created Spark session with Hive support")
     } else {
-      sparkSession = new SparkSession(sparkContext)
+      sparkSession = SparkSession.builder.getOrCreate()
       logInfo("Created Spark session")
     }
     sparkSession


http://git-wip-us.apache.org/repos/asf/spark/blob/ca991716/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 3836ce2..aa7c335 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -54,6 +54,7 @@ import org.apache.spark.util.Utils
 * {{{
 *   SparkSession.builder()
 *     .master("local")
+*     .appName("Word Count")
 *     .config("spark.some.config.option", "some-value").
 *     .getOrCreate()
 * }}}
@@ -63,7 +64,7 @@ class SparkSession private(
     @transient private val existingSharedState: Option[SharedState])
   extends Serializable with Logging { self =>

-  def this(sc: SparkContext) {
+  private[sql] def this(sc: SparkContext) {
     this(sc, None)
   }

@@ -573,7 +574,7 @@ class SparkSession private(
   * common Scala objects into [[DataFrame]]s.
   *
   * {{{
-  *   val sparkSession = new SparkSession(sc)
+  *   val sparkSession = SparkSession.builder.getOrCreate()
   *   import sparkSession.implicits._
   * }}}
   *
@@ -586,6 +587,15 @@ class SparkSession private(
   }
   // scalastyle:on

+  /**
+   * Stop the underlying [[SparkContext]].
+   *
+   * @since 2.0.0
+   */
+  def stop(): Unit = {
+    sparkContext.stop()
+  }
+
   protected[sql] def parseSql(sql: String): LogicalPlan = {
     sessionState.sqlParser.parsePlan(sql)
   }
---------------------------------------------------------------------
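Putting the pieces together, here is a hypothetical end-to-end sketch of the resulting lifecycle: build or reuse a session, use the implicits shown in the scaladoc above, then shut down with the stop() method this commit adds. The object name and toy data are illustrative only.

import org.apache.spark.sql.SparkSession

object SessionLifecycleSketch {
  def main(args: Array[String]): Unit = {
    // Builder is now the only public way to obtain a SparkSession.
    val spark = SparkSession.builder
      .master("local")
      .appName("SessionLifecycleSketch")  // placeholder name
      .getOrCreate()

    import spark.implicits._
    Seq(1, 2, 3).toDF("n").show()  // exercise the implicits from the scaladoc example

    // stop(), added by this commit, delegates to sparkContext.stop().
    spark.stop()
  }
}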