[SPARK-32458][SQL][TESTS] Fix incorrectly sized row value reads

### What changes were proposed in this pull request?
Update the tests to use `getInt` or `getLong` calls whose width matches the value that was written.

### Why are the changes needed?
The reads were incorrectly sized (i.e. `putLong` paired with `getInt` and `putInt` paired with `getLong`). A read that is narrower or wider than the corresponding write still happens to return the expected value on little-endian systems, but on big-endian systems it picks up the wrong bytes, causing test failures.
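
For illustration only (not part of this patch), here is a minimal Scala sketch of the mismatch using `java.nio.ByteBuffer` in native byte order; the `EndianMismatchDemo` name is hypothetical, and the buffer merely stands in for Spark's unsafe row storage, which also uses the platform's native byte order.

```scala
import java.nio.{ByteBuffer, ByteOrder}

// Standalone sketch (not part of the patch) of why a mismatched putLong/getInt
// pair only appears to work on little-endian hardware.
object EndianMismatchDemo {
  def main(args: Array[String]): Unit = {
    // Native byte order, like the unsafe memory accesses used by Spark's row format.
    val buf = ByteBuffer.allocate(8).order(ByteOrder.nativeOrder())
    buf.putLong(0, 19285L) // write an 8-byte value, as putLong does

    // Read back only the first 4 bytes, as a mismatched getInt does.
    // Little-endian: the low-order bytes come first, so this prints 19285.
    // Big-endian (e.g. s390x): the high-order bytes come first, so it prints 0.
    println(buf.getInt(0))
  }
}
```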

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
Tests were run on a big-endian system (s390x). This change is unlikely to have any practical effect on little-endian systems.

Closes #29258 from mundaym/fix-row.

Authored-by: Michael Munday <mike.munday@ibm.com>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
Commit a3d80564ad (parent 44c868b73a), committed 2020-07-28 10:36:20 -07:00
2 changed files with 5 additions and 5 deletions

@@ -336,7 +336,7 @@ class RowEncoderSuite extends CodegenInterpretedPlanTest {
     val encoder = RowEncoder(schema).resolveAndBind()
     val localDate = java.time.LocalDate.parse("2019-02-27")
     val row = toRow(encoder, Row(localDate))
-    assert(row.getLong(0) === DateTimeUtils.localDateToDays(localDate))
+    assert(row.getInt(0) === DateTimeUtils.localDateToDays(localDate))
     val readback = fromRow(encoder, row)
     assert(readback.get(0).equals(localDate))
   }

@@ -48,8 +48,8 @@ class UnsafeMapSuite extends SparkFunSuite {
     val ser = new JavaSerializer(new SparkConf).newInstance()
     val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
     assert(mapDataSer.numElements() == 1)
-    assert(mapDataSer.keyArray().getInt(0) == 19285)
-    assert(mapDataSer.valueArray().getInt(0) == 19286)
+    assert(mapDataSer.keyArray().getLong(0) == 19285)
+    assert(mapDataSer.valueArray().getLong(0) == 19286)
     assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
   }
@@ -57,8 +57,8 @@ class UnsafeMapSuite extends SparkFunSuite {
     val ser = new KryoSerializer(new SparkConf).newInstance()
     val mapDataSer = ser.deserialize[UnsafeMapData](ser.serialize(unsafeMapData))
     assert(mapDataSer.numElements() == 1)
-    assert(mapDataSer.keyArray().getInt(0) == 19285)
-    assert(mapDataSer.valueArray().getInt(0) == 19286)
+    assert(mapDataSer.keyArray().getLong(0) == 19285)
+    assert(mapDataSer.valueArray().getLong(0) == 19286)
     assert(mapDataSer.getBaseObject.asInstanceOf[Array[Byte]].length == 1024)
   }
 }