[SPARK-24322][BUILD] Upgrade Apache ORC to 1.4.4

## What changes were proposed in this pull request?

ORC 1.4.4 includes [nine fixes](https://issues.apache.org/jira/issues/?filter=12342568&jql=project%20%3D%20ORC%20AND%20resolution%20%3D%20Fixed%20AND%20fixVersion%20%3D%201.4.4). One of them is a `Timestamp` bug (ORC-306) that occurs when the `native` ORC vectorized reader reads the `time` and `nanos` sub-vectors of an ORC column vector. ORC-306 fixes this according to the [original definition](https://github.com/apache/hive/blob/master/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java#L45-L46), and this PR updates Spark's interpretation of ORC column vectors to match. Note that the `hive` ORC reader and the ORC MR reader are not affected. The following Spark 2.3.0 session shows the bug: a timestamp written as `12:34:56.000789` is read back as `12:34:55.000789`.

```scala
scala> spark.version
res0: String = 2.3.0
scala> spark.sql("set spark.sql.orc.impl=native")
scala> Seq(java.sql.Timestamp.valueOf("1900-05-05 12:34:56.000789")).toDF().write.orc("/tmp/orc")
scala> spark.read.orc("/tmp/orc").show(false)
+--------------------------+
|value                     |
+--------------------------+
|1900-05-05 12:34:55.000789|
+--------------------------+
```

This PR updates Apache Spark to use ORC 1.4.4 and adjusts the `native` vectorized reader accordingly.
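For reference, here is a minimal sketch of the corrected micros-since-epoch conversion, assuming the documented `TimestampColumnVector` semantics (`time` holds milliseconds since the epoch, already including the millisecond part of the fraction; `nanos` holds the full sub-second fraction in nanoseconds). `microsSinceEpoch` is a hypothetical helper name for illustration, not Spark's API:

```scala
// A minimal sketch, assuming the documented TimestampColumnVector
// semantics: `time` = milliseconds since epoch (including the
// millisecond part of the fraction) and `nanos` = the full sub-second
// fraction in nanoseconds (0 to 999999999).
// `microsSinceEpoch` is a hypothetical helper, for illustration only.
def microsSinceEpoch(time: Long, nanos: Int): Long = {
  // time * 1000 converts milliseconds to microseconds; the millisecond
  // part of `nanos` is already counted there, so add only the
  // sub-millisecond microseconds: nanos / 1000 % 1000.
  time * 1000 + nanos / 1000 % 1000
}

// Example: a fraction of .123456789 seconds gives nanos = 123456789;
// nanos / 1000 % 1000 == 456, the microseconds beyond the millisecond.
```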

**FULL LIST**

ID | TITLE
-- | --
ORC-281 | Fix compiler warnings from clang 5.0
ORC-301 | `extractFileTail` should open a file in `try` statement
ORC-304 | Fix TestRecordReaderImpl to not fail with new storage-api
ORC-306 | Fix incorrect workaround for bug in java.sql.Timestamp
ORC-324 | Add support for ARM and PPC arch
ORC-330 | Remove unnecessary Hive artifacts from root pom
ORC-332 | Add syntax version to orc_proto.proto
ORC-336 | Remove avro and parquet dependency management entries
ORC-360 | Implement error checking on subtype fields in Java

## How was this patch tested?

Pass Jenkins, including the newly added test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #21372 from dongjoon-hyun/SPARK_ORC144.
7 changed files with 18 additions and 9 deletions

```diff
@@ -157,8 +157,8 @@ objenesis-2.1.jar
 okhttp-3.8.1.jar
 okio-1.13.0.jar
 opencsv-2.3.jar
-orc-core-1.4.3-nohive.jar
-orc-mapreduce-1.4.3-nohive.jar
+orc-core-1.4.4-nohive.jar
+orc-mapreduce-1.4.4-nohive.jar
 oro-2.0.8.jar
 osgi-resource-locator-1.0.1.jar
 paranamer-2.8.jar
```

```diff
@@ -158,8 +158,8 @@ objenesis-2.1.jar
 okhttp-3.8.1.jar
 okio-1.13.0.jar
 opencsv-2.3.jar
-orc-core-1.4.3-nohive.jar
-orc-mapreduce-1.4.3-nohive.jar
+orc-core-1.4.4-nohive.jar
+orc-mapreduce-1.4.4-nohive.jar
 oro-2.0.8.jar
 osgi-resource-locator-1.0.1.jar
 paranamer-2.8.jar
```

```diff
@@ -176,8 +176,8 @@ okhttp-2.7.5.jar
 okhttp-3.8.1.jar
 okio-1.13.0.jar
 opencsv-2.3.jar
-orc-core-1.4.3-nohive.jar
-orc-mapreduce-1.4.3-nohive.jar
+orc-core-1.4.4-nohive.jar
+orc-mapreduce-1.4.4-nohive.jar
 oro-2.0.8.jar
 osgi-resource-locator-1.0.1.jar
 paranamer-2.8.jar
```

```diff
@@ -130,7 +130,7 @@
     <hive.version.short>1.2.1</hive.version.short>
     <derby.version>10.12.1.1</derby.version>
     <parquet.version>1.10.0</parquet.version>
-    <orc.version>1.4.3</orc.version>
+    <orc.version>1.4.4</orc.version>
     <orc.classifier>nohive</orc.classifier>
     <hive.parquet.version>1.6.0</hive.parquet.version>
     <jetty.version>9.3.20.v20170531</jetty.version>
```

```diff
@@ -136,7 +136,7 @@ public class OrcColumnVector extends org.apache.spark.sql.vectorized.ColumnVector {
   public long getLong(int rowId) {
     int index = getRowIndex(rowId);
     if (isTimestamp) {
-      return timestampData.time[index] * 1000 + timestampData.nanos[index] / 1000;
+      return timestampData.time[index] * 1000 + timestampData.nanos[index] / 1000 % 1000;
     } else {
       return longData.vector[index];
     }
```

```diff
@@ -497,7 +497,7 @@ public class OrcColumnarBatchReader extends RecordReader<Void, ColumnarBatch> {
    * Returns the number of micros since epoch from an element of TimestampColumnVector.
    */
   private static long fromTimestampColumnVector(TimestampColumnVector vector, int index) {
-    return vector.time[index] * 1000L + vector.nanos[index] / 1000L;
+    return vector.time[index] * 1000 + (vector.nanos[index] / 1000 % 1000);
   }
 
   /**
```
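As a quick sanity check on the new expression (an illustration, not part of the patch): `/` and `%` have equal precedence and associate left in Java and Scala, so it reads `(nanos / 1000) % 1000`:

```scala
// Sanity check for a fraction of .123456789 seconds (nanos = 123456789):
// (nanos / 1000) % 1000 keeps only the microseconds beyond the millisecond.
assert(123456789 / 1000 % 1000 == 456)
// The old expression added all sub-second microseconds on top of
// time * 1000, double-counting the 123 ms already carried by `time`.
assert(123456789 / 1000 == 123456)
```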

```diff
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.datasources.orc
 
 import java.io.File
+import java.sql.Timestamp
 import java.util.Locale
 
 import org.apache.orc.OrcConf.COMPRESS
@@ -169,6 +170,14 @@ abstract class OrcSuite extends OrcTest with BeforeAndAfterAll {
       }
     }
   }
+
+  test("SPARK-24322 Fix incorrect workaround for bug in java.sql.Timestamp") {
+    withTempPath { path =>
+      val ts = Timestamp.valueOf("1900-05-05 12:34:56.000789")
+      Seq(ts).toDF.write.orc(path.getCanonicalPath)
+      checkAnswer(spark.read.orc(path.getCanonicalPath), Row(ts))
+    }
+  }
 }
 
 class OrcSourceSuite extends OrcSuite with SharedSQLContext {
```
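The added test round-trips the problematic pre-epoch timestamp through ORC and verifies it with `checkAnswer`. To run just this suite locally, an sbt invocation along the lines of `build/sbt "sql/testOnly *OrcSourceSuite"` should work (the module name and suite filter are assumed here, not taken from this PR).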