spark-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From marmb...@apache.org
Subject spark git commit: [SPARK-9142] [SQL] Removing unnecessary self types in Catalyst.
Date Fri, 17 Jul 2015 22:02:26 GMT
Repository: spark
Updated Branches:
  refs/heads/master 42d8a012f -> b2aa490bb


[SPARK-9142] [SQL] Removing unnecessary self types in Catalyst.

Just a small change to add Product type to the base expression/plan abstract classes, based
on suggestions on #7434 and offline discussions.

Author: Reynold Xin <rxin@databricks.com>

Closes #7479 from rxin/remove-self-types and squashes the following commits:

e407ffd [Reynold Xin] [SPARK-9142][SQL] Removing unnecessary self types in Catalyst.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b2aa490b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b2aa490b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b2aa490b

Branch: refs/heads/master
Commit: b2aa490bb60176631c94ecadf87c14564960f12c
Parents: 42d8a01
Author: Reynold Xin <rxin@databricks.com>
Authored: Fri Jul 17 15:02:13 2015 -0700
Committer: Michael Armbrust <michael@databricks.com>
Committed: Fri Jul 17 15:02:13 2015 -0700

----------------------------------------------------------------------
 .../org/apache/spark/sql/catalyst/analysis/unresolved.scala | 1 -
 .../apache/spark/sql/catalyst/expressions/Expression.scala  | 7 +------
 .../apache/spark/sql/catalyst/expressions/aggregates.scala  | 3 ---
 .../apache/spark/sql/catalyst/expressions/arithmetic.scala  | 1 -
 .../spark/sql/catalyst/expressions/conditionals.scala       | 1 -
 .../apache/spark/sql/catalyst/expressions/generators.scala  | 2 +-
 .../org/apache/spark/sql/catalyst/expressions/math.scala    | 5 ++---
 .../spark/sql/catalyst/expressions/namedExpressions.scala   | 4 ++--
 .../apache/spark/sql/catalyst/expressions/predicates.scala  | 3 ---
 .../org/apache/spark/sql/catalyst/expressions/random.scala  | 1 -
 .../spark/sql/catalyst/expressions/windowExpressions.scala  | 2 --
 .../spark/sql/catalyst/plans/logical/LogicalPlan.scala      | 9 +--------
 .../spark/sql/catalyst/plans/logical/basicOperators.scala   | 2 +-
 .../spark/sql/catalyst/plans/logical/partitioning.scala     | 2 --
 .../scala/org/apache/spark/sql/execution/SparkPlan.scala    | 9 +--------
 .../scala/org/apache/spark/sql/execution/commands.scala     | 2 --
 .../org/apache/spark/sql/parquet/ParquetRelation.scala      | 2 --
 .../org/apache/spark/sql/hive/HiveMetastoreCatalog.scala    | 2 --
 18 files changed, 9 insertions(+), 49 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index 7089f07..4a1a1ed 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -96,7 +96,6 @@ case class UnresolvedFunction(name: String, children: Seq[Expression]) extends E
  * "SELECT * FROM ...". A [[Star]] gets automatically expanded during analysis.
  */
 abstract class Star extends LeafExpression with NamedExpression {
-  self: Product =>
 
   override def name: String = throw new UnresolvedException(this, "name")
   override def exprId: ExprId = throw new UnresolvedException(this, "exprId")

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index f396bd0..c70b5af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -43,8 +43,7 @@ import org.apache.spark.sql.types._
  *
  * See [[Substring]] for an example.
  */
-abstract class Expression extends TreeNode[Expression] {
-  self: Product =>
+abstract class Expression extends TreeNode[Expression] with Product {
 
   /**
    * Returns true when an expression is a candidate for static evaluation before the query is
@@ -187,7 +186,6 @@ abstract class Expression extends TreeNode[Expression] {
  * A leaf expression, i.e. one without any child expressions.
  */
 abstract class LeafExpression extends Expression {
-  self: Product =>
 
   def children: Seq[Expression] = Nil
 }
@@ -198,7 +196,6 @@ abstract class LeafExpression extends Expression {
  * if the input is evaluated to null.
  */
 abstract class UnaryExpression extends Expression {
-  self: Product =>
 
   def child: Expression
 
@@ -277,7 +274,6 @@ abstract class UnaryExpression extends Expression {
  * if any input is evaluated to null.
  */
 abstract class BinaryExpression extends Expression {
-  self: Product =>
 
   def left: Expression
   def right: Expression
@@ -370,7 +366,6 @@ abstract class BinaryExpression extends Expression {
  *    the analyzer will find the tightest common type and do the proper type casting.
  */
 abstract class BinaryOperator extends BinaryExpression with ExpectsInputTypes {
-  self: Product =>
 
   /**
    * Expected input type from both left/right child expressions, similar to the

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
index 71c943d..af9a674 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregates.scala
@@ -27,7 +27,6 @@ import org.apache.spark.sql.types._
 import org.apache.spark.util.collection.OpenHashSet
 
 trait AggregateExpression extends Expression {
-  self: Product =>
 
   /**
    * Aggregate expressions should not be foldable.
@@ -65,7 +64,6 @@ case class SplitEvaluation(
  * These partial evaluations can then be combined to compute the actual answer.
  */
 trait PartialAggregate extends AggregateExpression {
-  self: Product =>
 
   /**
    * Returns a [[SplitEvaluation]] that computes this aggregation using partial aggregation.
@@ -79,7 +77,6 @@ trait PartialAggregate extends AggregateExpression {
  */
 abstract class AggregateFunction
   extends LeafExpression with AggregateExpression with Serializable {
-  self: Product =>
 
   /** Base should return the generic aggregate expression that this function is computing
*/
   val base: AggregateExpression

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 1616d1b..c5960eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -77,7 +77,6 @@ case class Abs(child: Expression) extends UnaryExpression with ExpectsInputTypes
 }
 
 abstract class BinaryArithmetic extends BinaryOperator {
-  self: Product =>
 
   override def dataType: DataType = left.dataType
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
index 9162b73..15b33da 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionals.scala
@@ -77,7 +77,6 @@ case class If(predicate: Expression, trueValue: Expression, falseValue: Expressi
 }
 
 trait CaseWhenLike extends Expression {
-  self: Product =>
 
   // Note that `branches` are considered in consecutive pairs (cond, val), and the optional last
   // element is the value for the default catch-all case (if provided).

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
index 51dc77e..c58a6d3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala
@@ -40,7 +40,7 @@ import org.apache.spark.sql.types._
  * requested.  The attributes produced by this function will be automatically copied anytime rules
  * result in changes to the Generator or its children.
  */
-trait Generator extends Expression { self: Product =>
+trait Generator extends Expression {
 
   // TODO ideally we should return the type of ArrayType(StructType),
   // however, we don't keep the output field names in the Generator.

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
index 7a543ff..b05a7b3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/math.scala
@@ -34,7 +34,6 @@ import org.apache.spark.unsafe.types.UTF8String
  */
 abstract class LeafMathExpression(c: Double, name: String)
   extends LeafExpression with Serializable {
-  self: Product =>
 
   override def dataType: DataType = DoubleType
   override def foldable: Boolean = true
@@ -58,7 +57,7 @@ abstract class LeafMathExpression(c: Double, name: String)
  * @param name The short name of the function
  */
 abstract class UnaryMathExpression(f: Double => Double, name: String)
-  extends UnaryExpression with Serializable with ImplicitCastInputTypes { self: Product =>
+  extends UnaryExpression with Serializable with ImplicitCastInputTypes {
 
   override def inputTypes: Seq[DataType] = Seq(DoubleType)
   override def dataType: DataType = DoubleType
@@ -92,7 +91,7 @@ abstract class UnaryMathExpression(f: Double => Double, name: String)
  * @param name The short name of the function
  */
 abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
-  extends BinaryExpression with Serializable with ImplicitCastInputTypes { self: Product =>
+  extends BinaryExpression with Serializable with ImplicitCastInputTypes {
 
   override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index 8bf7a7c..c083ac0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -40,7 +40,7 @@ case class ExprId(id: Long)
 /**
  * An [[Expression]] that is named.
  */
-trait NamedExpression extends Expression { self: Product =>
+trait NamedExpression extends Expression {
 
   /** We should never fold named expressions in order to not remove the alias. */
   override def foldable: Boolean = false
@@ -83,7 +83,7 @@ trait NamedExpression extends Expression { self: Product =>
     }
 }
 
-abstract class Attribute extends LeafExpression with NamedExpression { self: Product =>
+abstract class Attribute extends LeafExpression with NamedExpression {
 
   override def references: AttributeSet = AttributeSet(this)
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index aa6c30e..7a6fb2b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -38,8 +38,6 @@ object InterpretedPredicate {
  * An [[Expression]] that returns a boolean value.
  */
 trait Predicate extends Expression {
-  self: Product =>
-
   override def dataType: DataType = BooleanType
 }
 
@@ -222,7 +220,6 @@ case class Or(left: Expression, right: Expression) extends BinaryOperator with P
 
 
 abstract class BinaryComparison extends BinaryOperator with Predicate {
-  self: Product =>
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
     if (ctx.isPrimitiveType(left.dataType)) {

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala
index e10ba55..65093dc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/random.scala
@@ -33,7 +33,6 @@ import org.apache.spark.util.random.XORShiftRandom
  * Since this expression is stateful, it cannot be a case object.
  */
 abstract class RDG(seed: Long) extends LeafExpression with Serializable {
-  self: Product =>
 
   /**
    * Record ID within each partition. By being transient, the Random Number Generator is

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
index 3443616..c8aa571 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/windowExpressions.scala
@@ -254,8 +254,6 @@ object SpecifiedWindowFrame {
  * to retrieve value corresponding with these n rows.
  */
 trait WindowFunction extends Expression {
-  self: Product =>
-
   def init(): Unit
 
   def reset(): Unit

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index adac372..dd6c5d4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -25,8 +25,7 @@ import org.apache.spark.sql.catalyst.plans.QueryPlan
 import org.apache.spark.sql.catalyst.trees.TreeNode
 
 
-abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
-  self: Product =>
+abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging with Product{
 
   /**
    * Computes [[Statistics]] for this plan. The default implementation assumes the output
@@ -277,8 +276,6 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
  * A logical plan node with no children.
  */
 abstract class LeafNode extends LogicalPlan {
-  self: Product =>
-
   override def children: Seq[LogicalPlan] = Nil
 }
 
@@ -286,8 +283,6 @@ abstract class LeafNode extends LogicalPlan {
  * A logical plan node with single child.
  */
 abstract class UnaryNode extends LogicalPlan {
-  self: Product =>
-
   def child: LogicalPlan
 
   override def children: Seq[LogicalPlan] = child :: Nil
@@ -297,8 +292,6 @@ abstract class UnaryNode extends LogicalPlan {
  * A logical plan node with a left and right child.
  */
 abstract class BinaryNode extends LogicalPlan {
-  self: Product =>
-
   def left: LogicalPlan
   def right: LogicalPlan
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
index fae3398..fbe104d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/basicOperators.scala
@@ -298,7 +298,7 @@ case class Expand(
 }
 
 trait GroupingAnalytics extends UnaryNode {
-  self: Product =>
+
   def groupByExprs: Seq[Expression]
   def aggregations: Seq[NamedExpression]
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
index 63df2c1..1f76b03 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/partitioning.scala
@@ -24,8 +24,6 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression, SortOrd
  * result have expectations about the distribution and ordering of partitioned input data.
  */
 abstract class RedistributeData extends UnaryNode {
-  self: Product =>
-
   override def output: Seq[Attribute] = child.output
 }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index 632f633..ba12056 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -39,8 +39,7 @@ object SparkPlan {
  * :: DeveloperApi ::
  */
 @DeveloperApi
-abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializable {
-  self: Product =>
+abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Product with Serializable {
 
   /**
    * A handle to the SQL Context that was used to create this plan.   Since many operators need
@@ -239,14 +238,10 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
 }
 
 private[sql] trait LeafNode extends SparkPlan {
-  self: Product =>
-
   override def children: Seq[SparkPlan] = Nil
 }
 
 private[sql] trait UnaryNode extends SparkPlan {
-  self: Product =>
-
   def child: SparkPlan
 
   override def children: Seq[SparkPlan] = child :: Nil
@@ -255,8 +250,6 @@ private[sql] trait UnaryNode extends SparkPlan {
 }
 
 private[sql] trait BinaryNode extends SparkPlan {
-  self: Product =>
-
   def left: SparkPlan
   def right: SparkPlan
 

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
index 5e9951f..bace3f8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/commands.scala
@@ -35,8 +35,6 @@ import org.apache.spark.sql.{DataFrame, Row, SQLConf, SQLContext}
  * wrapped in `ExecutedCommand` during execution.
  */
 private[sql] trait RunnableCommand extends LogicalPlan with logical.Command {
-  self: Product =>
-
   override def output: Seq[Attribute] = Seq.empty
   override def children: Seq[LogicalPlan] = Seq.empty
   def run(sqlContext: SQLContext): Seq[Row]

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
index e0bea65..086559e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetRelation.scala
@@ -54,8 +54,6 @@ private[sql] case class ParquetRelation(
     partitioningAttributes: Seq[Attribute] = Nil)
   extends LeafNode with MultiInstanceRelation {
 
-  self: Product =>
-
   /** Schema derived from ParquetFile */
   def parquetSchema: MessageType =
     ParquetTypesConverter

http://git-wip-us.apache.org/repos/asf/spark/blob/b2aa490b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
index 4b7a782..6589bc6 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
@@ -596,8 +596,6 @@ private[hive] case class MetastoreRelation
     (@transient sqlContext: SQLContext)
   extends LeafNode with MultiInstanceRelation {
 
-  self: Product =>
-
   override def equals(other: Any): Boolean = other match {
     case relation: MetastoreRelation =>
       databaseName == relation.databaseName &&


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org


Mime
View raw message