[SPARK-10584] [DOC] [SQL] Documentation about spark.sql.hive.metastore.version is wrong.
The default value of the Hive metastore version is 1.2.1, but the documentation says the value of `spark.sql.hive.metastore.version` is 0.13.1. Also, the default value cannot be retrieved via `sqlContext.getConf("spark.sql.hive.metastore.version")`. Author: Kousuke Saruta <sarutak@oss.nttdata.co.jp> Closes #8739 from sarutak/SPARK-10584.
This commit is contained in:
parent
32407bfd2b
commit
cf2821ef5f
|
@ -1687,7 +1687,7 @@ The following options can be used to configure the version of Hive that is used
|
||||||
<tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
|
<tr><th>Property Name</th><th>Default</th><th>Meaning</th></tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td><code>spark.sql.hive.metastore.version</code></td>
|
<td><code>spark.sql.hive.metastore.version</code></td>
|
||||||
<td><code>0.13.1</code></td>
|
<td><code>1.2.1</code></td>
|
||||||
<td>
|
<td>
|
||||||
Version of the Hive metastore. Available
|
Version of the Hive metastore. Available
|
||||||
options are <code>0.12.0</code> through <code>1.2.1</code>.
|
options are <code>0.12.0</code> through <code>1.2.1</code>.
|
||||||
|
|
|
@ -111,8 +111,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
|
||||||
* this does not necessarily need to be the same version of Hive that is used internally by
|
* this does not necessarily need to be the same version of Hive that is used internally by
|
||||||
* Spark SQL for execution.
|
* Spark SQL for execution.
|
||||||
*/
|
*/
|
||||||
protected[hive] def hiveMetastoreVersion: String =
|
protected[hive] def hiveMetastoreVersion: String = getConf(HIVE_METASTORE_VERSION)
|
||||||
getConf(HIVE_METASTORE_VERSION, hiveExecutionVersion)
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The location of the jars that should be used to instantiate the HiveMetastoreClient. This
|
* The location of the jars that should be used to instantiate the HiveMetastoreClient. This
|
||||||
|
@ -202,7 +201,7 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) with Logging {
|
||||||
"Builtin jars can only be used when hive execution version == hive metastore version. " +
|
"Builtin jars can only be used when hive execution version == hive metastore version. " +
|
||||||
s"Execution: ${hiveExecutionVersion} != Metastore: ${hiveMetastoreVersion}. " +
|
s"Execution: ${hiveExecutionVersion} != Metastore: ${hiveMetastoreVersion}. " +
|
||||||
"Specify a vaild path to the correct hive jars using $HIVE_METASTORE_JARS " +
|
"Specify a vaild path to the correct hive jars using $HIVE_METASTORE_JARS " +
|
||||||
s"or change $HIVE_METASTORE_VERSION to $hiveExecutionVersion.")
|
s"or change ${HIVE_METASTORE_VERSION.key} to $hiveExecutionVersion.")
|
||||||
}
|
}
|
||||||
|
|
||||||
// We recursively find all jars in the class loader chain,
|
// We recursively find all jars in the class loader chain,
|
||||||
|
@ -606,7 +605,11 @@ private[hive] object HiveContext {
|
||||||
/** The version of hive used internally by Spark SQL. */
|
/** The version of hive used internally by Spark SQL. */
|
||||||
val hiveExecutionVersion: String = "1.2.1"
|
val hiveExecutionVersion: String = "1.2.1"
|
||||||
|
|
||||||
val HIVE_METASTORE_VERSION: String = "spark.sql.hive.metastore.version"
|
val HIVE_METASTORE_VERSION = stringConf("spark.sql.hive.metastore.version",
|
||||||
|
defaultValue = Some(hiveExecutionVersion),
|
||||||
|
doc = "Version of the Hive metastore. Available options are " +
|
||||||
|
s"<code>0.12.0</code> through <code>$hiveExecutionVersion</code>.")
|
||||||
|
|
||||||
val HIVE_METASTORE_JARS = stringConf("spark.sql.hive.metastore.jars",
|
val HIVE_METASTORE_JARS = stringConf("spark.sql.hive.metastore.jars",
|
||||||
defaultValue = Some("builtin"),
|
defaultValue = Some("builtin"),
|
||||||
doc = s"""
|
doc = s"""
|
||||||
|
|
Loading…
Reference in a new issue