spark-reviews mailing list archives

From gatorsmile <...@git.apache.org>
Subject [GitHub] spark pull request #15024: [SPARK-17470][SQL] unify path for data source tab...
Date Mon, 19 Sep 2016 05:01:47 GMT
Github user gatorsmile commented on a diff in the pull request:

    https://github.com/apache/spark/pull/15024#discussion_r79328309
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSource.scala
---
    @@ -507,3 +400,117 @@ case class DataSource(
         }
       }
     }
    +
    +object DataSource {
    +  /** A map to maintain backward compatibility in case we move data sources around. */
    +  private val backwardCompatibilityMap: Map[String, String] = {
    +    val jdbc = classOf[JdbcRelationProvider].getCanonicalName
    +    val json = classOf[JsonFileFormat].getCanonicalName
    +    val parquet = classOf[ParquetFileFormat].getCanonicalName
    +    val csv = classOf[CSVFileFormat].getCanonicalName
    +    val libsvm = "org.apache.spark.ml.source.libsvm.LibSVMFileFormat"
    +    val orc = "org.apache.spark.sql.hive.orc.OrcFileFormat"
    +
    +    Map(
    +      "org.apache.spark.sql.jdbc" -> jdbc,
    +      "org.apache.spark.sql.jdbc.DefaultSource" -> jdbc,
    +      "org.apache.spark.sql.execution.datasources.jdbc.DefaultSource" -> jdbc,
    +      "org.apache.spark.sql.execution.datasources.jdbc" -> jdbc,
    +      "org.apache.spark.sql.json" -> json,
    +      "org.apache.spark.sql.json.DefaultSource" -> json,
    +      "org.apache.spark.sql.execution.datasources.json" -> json,
    +      "org.apache.spark.sql.execution.datasources.json.DefaultSource" -> json,
    +      "org.apache.spark.sql.parquet" -> parquet,
    +      "org.apache.spark.sql.parquet.DefaultSource" -> parquet,
    +      "org.apache.spark.sql.execution.datasources.parquet" -> parquet,
    +      "org.apache.spark.sql.execution.datasources.parquet.DefaultSource" -> parquet,
    +      "org.apache.spark.sql.hive.orc.DefaultSource" -> orc,
    +      "org.apache.spark.sql.hive.orc" -> orc,
    +      "org.apache.spark.ml.source.libsvm.DefaultSource" -> libsvm,
    +      "org.apache.spark.ml.source.libsvm" -> libsvm,
    +      "com.databricks.spark.csv" -> csv
    +    )
    +  }
    +
    +  /**
    +   * Classes that were removed in Spark 2.0. Used to detect incompatible libraries for Spark 2.0.
    +   */
    +  private val spark2RemovedClasses = Set(
    +    "org.apache.spark.sql.DataFrame",
    +    "org.apache.spark.sql.sources.HadoopFsRelationProvider",
    +    "org.apache.spark.Logging")
    +
    +  /** Given a provider name, look up the data source class definition. */
    +  def lookupDataSource(provider0: String): Class[_] = {
    +    val provider = backwardCompatibilityMap.getOrElse(provider0, provider0)
    +    val provider2 = s"$provider.DefaultSource"
    +    val loader = Utils.getContextOrSparkClassLoader
    +    val serviceLoader = ServiceLoader.load(classOf[DataSourceRegister], loader)
    +
    +    try {
    +      serviceLoader.asScala.filter(_.shortName().equalsIgnoreCase(provider)).toList match {
    +        // the provider format did not match any given registered aliases
    +        case Nil =>
    +          try {
    +            Try(loader.loadClass(provider)).orElse(Try(loader.loadClass(provider2))) match {
    +              case Success(dataSource) =>
    +                // Found the data source using fully qualified path
    +                dataSource
    +              case Failure(error) =>
    +                if (provider.toLowerCase == "orc" ||
    +                  provider.startsWith("org.apache.spark.sql.hive.orc")) {
    +                  throw new AnalysisException(
    +                    "The ORC data source must be used with Hive support enabled")
    +                } else if (provider.toLowerCase == "avro" ||
    +                  provider == "com.databricks.spark.avro") {
    +                  throw new AnalysisException(
    +                    s"Failed to find data source: ${provider.toLowerCase}. Please find
an Avro " +
    +                      "package at " +
    +                      "https://cwiki.apache.org/confluence/display/SPARK/Third+Party+Projects")
    +                } else {
    +                  throw new ClassNotFoundException(
    +                    s"Failed to find data source: $provider. Please find packages at
" +
    +                      "https://cwiki.apache.org/confluence/display/SPARK/Third+Party+Projects",
    +                    error)
    +                }
    +            }
    +          } catch {
    +            case e: NoClassDefFoundError => // This one won't be caught by Scala NonFatal
    +              // NoClassDefFoundError's class name uses "/" rather than "." for packages
    +              val className = e.getMessage.replaceAll("/", ".")
    +              if (spark2RemovedClasses.contains(className)) {
    +                throw new ClassNotFoundException(s"$className was removed in Spark 2.0. " +
    +                  "Please check if your library is compatible with Spark 2.0", e)
    +              } else {
    +                throw e
    +              }
    +          }
    +        case head :: Nil =>
    +          // there is exactly one registered alias
    +          head.getClass
    +        case sources =>
    +          // There are multiple registered aliases for the input
    +          sys.error(s"Multiple sources found for $provider " +
    +            s"(${sources.map(_.getClass.getName).mkString(", ")}), " +
    +            "please specify the fully qualified class name.")
    +      }
    +    } catch {
    +      case e: ServiceConfigurationError if e.getCause.isInstanceOf[NoClassDefFoundError] =>
    +        // NoClassDefFoundError's class name uses "/" rather than "." for packages
    +        val className = e.getCause.getMessage.replaceAll("/", ".")
    +        if (spark2RemovedClasses.contains(className)) {
    +          throw new ClassNotFoundException(s"Detected an incompatible DataSourceRegister. " +
    +            "Please remove the incompatible library from classpath or upgrade it. " +
    +            s"Error: ${e.getMessage}", e)
    +        } else {
    +          throw e
    +        }
    +    }
    +  }
    --- End diff --
    
    To the other reviewers: the above source code is completely identical to the previous `lookupDataSource`, `backwardCompatibilityMap`, and `spark2RemovedClasses`.
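
    For anyone tracing the three match arms, here is a minimal sketch (not part of the diff) of how the lookup resolves a provider string; `com.example.MyCustomSource` is a hypothetical provider name used only for illustration:
    
        import org.apache.spark.sql.execution.datasources.DataSource
        
        // Registered alias: matched via ServiceLoader against DataSourceRegister.shortName()
        DataSource.lookupDataSource("parquet")
        // -> the ParquetFileFormat class
        
        // Legacy package name: rewritten through backwardCompatibilityMap, then loaded by class name
        DataSource.lookupDataSource("org.apache.spark.sql.json")
        // -> the JsonFileFormat class
        
        // Anything else: try loading the class as given, then "$provider.DefaultSource";
        // if both fail, the error message points at the Third Party Projects page
        DataSource.lookupDataSource("com.example.MyCustomSource")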

