[HOTFIX] Use 'new Job' in fsBasedParquet.scala
Same issue as #6095. cc liancheng. Author: zsxwing <zsxwing@gmail.com>. Closes #6136 from zsxwing/hotfix and squashes the following commits: 4beea54 [zsxwing] Use 'new Job' in fsBasedParquet.scala
This commit is contained in:
parent
32e27df412
commit
728af88cf6
|
@@ -231,7 +231,7 @@ private[sql] class FSBasedParquetRelation(
|
|||
filters: Array[Filter],
|
||||
inputPaths: Array[String]): RDD[Row] = {
|
||||
|
||||
val job = Job.getInstance(SparkHadoopUtil.get.conf)
|
||||
val job = new Job(SparkHadoopUtil.get.conf)
|
||||
val conf = ContextUtil.getConfiguration(job)
|
||||
|
||||
ParquetInputFormat.setReadSupportClass(job, classOf[RowReadSupport])
|
||||
|
|
Loading…
Reference in a new issue