spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From r...@apache.org
Subject spark git commit: [SPARK-6592][SQL] fix filter for scaladoc to generate API doc for Row class under catalyst dir
Date Mon, 30 Mar 2015 18:54:59 GMT
Repository: spark
Updated Branches:
  refs/heads/branch-1.3 93a71663a -> f9d4efa72


[SPARK-6592][SQL] fix filter for scaladoc to generate API doc for Row class under catalyst
dir

https://issues.apache.org/jira/browse/SPARK-6592

The current implementation in SparkBuild.scala filters out all classes under the catalyst directory; however,
we have a corner case in which the Row class is a public API under that directory

we need to include Row in the scaladoc while still excluding the other classes of the catalyst project

Thanks for the help on this patch from rxin and liancheng

Author: CodingCat <zhunansjtu@gmail.com>

Closes #5252 from CodingCat/SPARK-6592 and squashes the following commits:

02098a4 [CodingCat] ignore collection, enable types (except those protected classes)
f7af2cb [CodingCat] commit
3ab4403 [CodingCat] fix filter for scaladoc to generate API doc for Row.scala under catalyst
directory

(cherry picked from commit 32259c671ab419f4c8a6ba8e2f7d676c5dfd0f4f)
Signed-off-by: Reynold Xin <rxin@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f9d4efa7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f9d4efa7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f9d4efa7

Branch: refs/heads/branch-1.3
Commit: f9d4efa72acd567ea12ca2d47cfa59ea8121fd57
Parents: 93a7166
Author: CodingCat <zhunansjtu@gmail.com>
Authored: Mon Mar 30 11:54:44 2015 -0700
Committer: Reynold Xin <rxin@databricks.com>
Committed: Mon Mar 30 11:54:57 2015 -0700

----------------------------------------------------------------------
 project/SparkBuild.scala                            | 16 ++++++++--------
 .../spark/sql/types/DataTypeConversions.scala       |  2 +-
 .../org/apache/spark/sql/types/DataTypeParser.scala |  2 +-
 3 files changed, 10 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f9d4efa7/project/SparkBuild.scala
----------------------------------------------------------------------
diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 4f17df5..2082691 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -361,15 +361,15 @@ object Unidoc {
     packages
       .map(_.filterNot(_.getName.contains("$")))
       .map(_.filterNot(_.getCanonicalPath.contains("akka")))
-      .map(_.filterNot(_.getCanonicalPath.contains("deploy")))
-      .map(_.filterNot(_.getCanonicalPath.contains("network")))
-      .map(_.filterNot(_.getCanonicalPath.contains("shuffle")))
-      .map(_.filterNot(_.getCanonicalPath.contains("executor")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/shuffle")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
       .map(_.filterNot(_.getCanonicalPath.contains("python")))
-      .map(_.filterNot(_.getCanonicalPath.contains("collection")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/catalyst")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/execution")))
-      .map(_.filterNot(_.getCanonicalPath.contains("sql/hive/test")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/collection")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/execution")))
+      .map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive/test")))
   }
 
   lazy val settings = scalaJavaUnidocSettings ++ Seq (

http://git-wip-us.apache.org/repos/asf/spark/blob/f9d4efa7/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
index c243be0..a9d63e7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeConversions.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.ScalaReflection
 import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
 
 
-protected[sql] object DataTypeConversions {
+private[sql] object DataTypeConversions {
 
   def productToRow(product: Product, schema: StructType): Row = {
     val mutableRow = new GenericMutableRow(product.productArity)

http://git-wip-us.apache.org/repos/asf/spark/blob/f9d4efa7/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
index 89278f7..34270d0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataTypeParser.scala
@@ -112,4 +112,4 @@ private[sql] object DataTypeParser {
 }
 
 /** The exception thrown from the [[DataTypeParser]]. */
-protected[sql] class DataTypeException(message: String) extends Exception(message)
+private[sql] class DataTypeException(message: String) extends Exception(message)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message