[SPARK-8959] [SQL] [HOTFIX] Removes parquet-thrift and libthrift dependencies

These two dependencies were introduced in #7231 to help testing Parquet compatibility with `parquet-thrift`. However, they somehow crash the Scala compiler in Maven builds.

This PR fixes this issue by:

1. Removing these two dependencies, and
2. Checking in an actual test Parquet file generated by `parquet-thrift` as a test resource, instead of generating it programmatically at test time.

This is just a quick fix to bring back Maven builds. We still need to figure out the root cause, since binary Parquet files are harder to maintain.
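For context, the resource-based approach boils down to resolving the checked-in file from the test classpath. A minimal sketch mirroring the updated suite in the diff below (it assumes a `sqlContext` is in scope, as in the test):

```scala
// Load the checked-in fixture from the test classpath and read it back
// through Spark SQL, as the updated ParquetThriftCompatibilitySuite does.
val parquetFilePath =
  Thread.currentThread().getContextClassLoader
    .getResource("parquet-thrift-compat.snappy.parquet")

val df = sqlContext.read.parquet(parquetFilePath.toString)
```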

Author: Cheng Lian <lian@databricks.com>

Closes #7330 from liancheng/spark-8959 and squashes the following commits:

cf69512 [Cheng Lian] Brings back Maven builds
Committed by Cheng Lian on 2015-07-09 17:09:16 -07:00
parent a0cc3e5aa3
commit 2d45571fcb
7 changed files with 8 additions and 3494 deletions

pom.xml

@@ -161,7 +161,6 @@
     <fasterxml.jackson.version>2.4.4</fasterxml.jackson.version>
     <snappy.version>1.1.1.7</snappy.version>
     <netlib.java.version>1.1.2</netlib.java.version>
-    <thrift.version>0.9.2</thrift.version>
     <!-- For maven shade plugin (see SPARK-8819) -->
     <create.dependency.reduced.pom>false</create.dependency.reduced.pom>
@@ -181,7 +180,6 @@
     <hive.deps.scope>compile</hive.deps.scope>
     <parquet.deps.scope>compile</parquet.deps.scope>
     <parquet.test.deps.scope>test</parquet.test.deps.scope>
-    <thrift.test.deps.scope>test</thrift.test.deps.scope>
     <!--
       Overridable test home. So that you can call individual pom files directly without
@@ -1122,18 +1120,6 @@
         <version>${parquet.version}</version>
         <scope>${parquet.test.deps.scope}</scope>
       </dependency>
-      <dependency>
-        <groupId>org.apache.parquet</groupId>
-        <artifactId>parquet-thrift</artifactId>
-        <version>${parquet.version}</version>
-        <scope>${parquet.test.deps.scope}</scope>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.thrift</groupId>
-        <artifactId>libthrift</artifactId>
-        <version>${thrift.version}</version>
-        <scope>${thrift.test.deps.scope}</scope>
-      </dependency>
       <dependency>
         <groupId>org.apache.flume</groupId>
         <artifactId>flume-ng-core</artifactId>

sql/core/pom.xml

@@ -106,16 +106,6 @@
       <artifactId>parquet-avro</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.parquet</groupId>
-      <artifactId>parquet-thrift</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.thrift</groupId>
-      <artifactId>libthrift</artifactId>
-      <scope>test</scope>
-    </dependency>
   </dependencies>
   <build>
     <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>

org/apache/spark/sql/parquet/test/thrift/Nested.java (deleted)

@@ -1,541 +0,0 @@
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.spark.sql.parquet.test.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-7")
public class Nested implements org.apache.thrift.TBase<Nested, Nested._Fields>, java.io.Serializable, Cloneable, Comparable<Nested> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Nested");
private static final org.apache.thrift.protocol.TField NESTED_INTS_COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("nestedIntsColumn", org.apache.thrift.protocol.TType.LIST, (short)1);
private static final org.apache.thrift.protocol.TField NESTED_STRING_COLUMN_FIELD_DESC = new org.apache.thrift.protocol.TField("nestedStringColumn", org.apache.thrift.protocol.TType.STRING, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new NestedStandardSchemeFactory());
schemes.put(TupleScheme.class, new NestedTupleSchemeFactory());
}
public List<Integer> nestedIntsColumn; // required
public String nestedStringColumn; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
NESTED_INTS_COLUMN((short)1, "nestedIntsColumn"),
NESTED_STRING_COLUMN((short)2, "nestedStringColumn");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // NESTED_INTS_COLUMN
return NESTED_INTS_COLUMN;
case 2: // NESTED_STRING_COLUMN
return NESTED_STRING_COLUMN;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.NESTED_INTS_COLUMN, new org.apache.thrift.meta_data.FieldMetaData("nestedIntsColumn", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
tmpMap.put(_Fields.NESTED_STRING_COLUMN, new org.apache.thrift.meta_data.FieldMetaData("nestedStringColumn", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Nested.class, metaDataMap);
}
public Nested() {
}
public Nested(
List<Integer> nestedIntsColumn,
String nestedStringColumn)
{
this();
this.nestedIntsColumn = nestedIntsColumn;
this.nestedStringColumn = nestedStringColumn;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public Nested(Nested other) {
if (other.isSetNestedIntsColumn()) {
List<Integer> __this__nestedIntsColumn = new ArrayList<Integer>(other.nestedIntsColumn);
this.nestedIntsColumn = __this__nestedIntsColumn;
}
if (other.isSetNestedStringColumn()) {
this.nestedStringColumn = other.nestedStringColumn;
}
}
public Nested deepCopy() {
return new Nested(this);
}
@Override
public void clear() {
this.nestedIntsColumn = null;
this.nestedStringColumn = null;
}
public int getNestedIntsColumnSize() {
return (this.nestedIntsColumn == null) ? 0 : this.nestedIntsColumn.size();
}
public java.util.Iterator<Integer> getNestedIntsColumnIterator() {
return (this.nestedIntsColumn == null) ? null : this.nestedIntsColumn.iterator();
}
public void addToNestedIntsColumn(int elem) {
if (this.nestedIntsColumn == null) {
this.nestedIntsColumn = new ArrayList<Integer>();
}
this.nestedIntsColumn.add(elem);
}
public List<Integer> getNestedIntsColumn() {
return this.nestedIntsColumn;
}
public Nested setNestedIntsColumn(List<Integer> nestedIntsColumn) {
this.nestedIntsColumn = nestedIntsColumn;
return this;
}
public void unsetNestedIntsColumn() {
this.nestedIntsColumn = null;
}
/** Returns true if field nestedIntsColumn is set (has been assigned a value) and false otherwise */
public boolean isSetNestedIntsColumn() {
return this.nestedIntsColumn != null;
}
public void setNestedIntsColumnIsSet(boolean value) {
if (!value) {
this.nestedIntsColumn = null;
}
}
public String getNestedStringColumn() {
return this.nestedStringColumn;
}
public Nested setNestedStringColumn(String nestedStringColumn) {
this.nestedStringColumn = nestedStringColumn;
return this;
}
public void unsetNestedStringColumn() {
this.nestedStringColumn = null;
}
/** Returns true if field nestedStringColumn is set (has been assigned a value) and false otherwise */
public boolean isSetNestedStringColumn() {
return this.nestedStringColumn != null;
}
public void setNestedStringColumnIsSet(boolean value) {
if (!value) {
this.nestedStringColumn = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case NESTED_INTS_COLUMN:
if (value == null) {
unsetNestedIntsColumn();
} else {
setNestedIntsColumn((List<Integer>)value);
}
break;
case NESTED_STRING_COLUMN:
if (value == null) {
unsetNestedStringColumn();
} else {
setNestedStringColumn((String)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case NESTED_INTS_COLUMN:
return getNestedIntsColumn();
case NESTED_STRING_COLUMN:
return getNestedStringColumn();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case NESTED_INTS_COLUMN:
return isSetNestedIntsColumn();
case NESTED_STRING_COLUMN:
return isSetNestedStringColumn();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof Nested)
return this.equals((Nested)that);
return false;
}
public boolean equals(Nested that) {
if (that == null)
return false;
boolean this_present_nestedIntsColumn = true && this.isSetNestedIntsColumn();
boolean that_present_nestedIntsColumn = true && that.isSetNestedIntsColumn();
if (this_present_nestedIntsColumn || that_present_nestedIntsColumn) {
if (!(this_present_nestedIntsColumn && that_present_nestedIntsColumn))
return false;
if (!this.nestedIntsColumn.equals(that.nestedIntsColumn))
return false;
}
boolean this_present_nestedStringColumn = true && this.isSetNestedStringColumn();
boolean that_present_nestedStringColumn = true && that.isSetNestedStringColumn();
if (this_present_nestedStringColumn || that_present_nestedStringColumn) {
if (!(this_present_nestedStringColumn && that_present_nestedStringColumn))
return false;
if (!this.nestedStringColumn.equals(that.nestedStringColumn))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_nestedIntsColumn = true && (isSetNestedIntsColumn());
list.add(present_nestedIntsColumn);
if (present_nestedIntsColumn)
list.add(nestedIntsColumn);
boolean present_nestedStringColumn = true && (isSetNestedStringColumn());
list.add(present_nestedStringColumn);
if (present_nestedStringColumn)
list.add(nestedStringColumn);
return list.hashCode();
}
@Override
public int compareTo(Nested other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetNestedIntsColumn()).compareTo(other.isSetNestedIntsColumn());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetNestedIntsColumn()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nestedIntsColumn, other.nestedIntsColumn);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetNestedStringColumn()).compareTo(other.isSetNestedStringColumn());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetNestedStringColumn()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nestedStringColumn, other.nestedStringColumn);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("Nested(");
boolean first = true;
sb.append("nestedIntsColumn:");
if (this.nestedIntsColumn == null) {
sb.append("null");
} else {
sb.append(this.nestedIntsColumn);
}
first = false;
if (!first) sb.append(", ");
sb.append("nestedStringColumn:");
if (this.nestedStringColumn == null) {
sb.append("null");
} else {
sb.append(this.nestedStringColumn);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (nestedIntsColumn == null) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'nestedIntsColumn' was not present! Struct: " + toString());
}
if (nestedStringColumn == null) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'nestedStringColumn' was not present! Struct: " + toString());
}
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class NestedStandardSchemeFactory implements SchemeFactory {
public NestedStandardScheme getScheme() {
return new NestedStandardScheme();
}
}
private static class NestedStandardScheme extends StandardScheme<Nested> {
public void read(org.apache.thrift.protocol.TProtocol iprot, Nested struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // NESTED_INTS_COLUMN
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
struct.nestedIntsColumn = new ArrayList<Integer>(_list0.size);
int _elem1;
for (int _i2 = 0; _i2 < _list0.size; ++_i2)
{
_elem1 = iprot.readI32();
struct.nestedIntsColumn.add(_elem1);
}
iprot.readListEnd();
}
struct.setNestedIntsColumnIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // NESTED_STRING_COLUMN
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.nestedStringColumn = iprot.readString();
struct.setNestedStringColumnIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, Nested struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.nestedIntsColumn != null) {
oprot.writeFieldBegin(NESTED_INTS_COLUMN_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.nestedIntsColumn.size()));
for (int _iter3 : struct.nestedIntsColumn)
{
oprot.writeI32(_iter3);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (struct.nestedStringColumn != null) {
oprot.writeFieldBegin(NESTED_STRING_COLUMN_FIELD_DESC);
oprot.writeString(struct.nestedStringColumn);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class NestedTupleSchemeFactory implements SchemeFactory {
public NestedTupleScheme getScheme() {
return new NestedTupleScheme();
}
}
private static class NestedTupleScheme extends TupleScheme<Nested> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
{
oprot.writeI32(struct.nestedIntsColumn.size());
for (int _iter4 : struct.nestedIntsColumn)
{
oprot.writeI32(_iter4);
}
}
oprot.writeString(struct.nestedStringColumn);
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, Nested struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
{
org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
struct.nestedIntsColumn = new ArrayList<Integer>(_list5.size);
int _elem6;
for (int _i7 = 0; _i7 < _list5.size; ++_i7)
{
_elem6 = iprot.readI32();
struct.nestedIntsColumn.add(_elem6);
}
}
struct.setNestedIntsColumnIsSet(true);
struct.nestedStringColumn = iprot.readString();
struct.setNestedStringColumnIsSet(true);
}
}
}
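The deleted `Nested` class above is standard Thrift-generated boilerplate for a two-field struct: a required `list<i32>` and a required `string`. As a rough sketch, assuming the usual parquet-thrift type mapping (required Thrift fields becoming non-nullable Catalyst fields), the Spark SQL schema it corresponds to when read back from Parquet would look like this:

```scala
import org.apache.spark.sql.types._

// Sketch only: the nullability mapping here is an assumption, not taken
// from this commit. list<i32> is assumed to become an array of non-null ints.
val nestedSchema = StructType(Seq(
  StructField("nestedIntsColumn", ArrayType(IntegerType, containsNull = false), nullable = false),
  StructField("nestedStringColumn", StringType, nullable = false)))
```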

org/apache/spark/sql/parquet/test/thrift/Suit.java (deleted)

@@ -1,51 +0,0 @@
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.spark.sql.parquet.test.thrift;
import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;
public enum Suit implements org.apache.thrift.TEnum {
SPADES(0),
HEARTS(1),
DIAMONDS(2),
CLUBS(3);
private final int value;
private Suit(int value) {
this.value = value;
}
/**
* Get the integer value of this enum value, as defined in the Thrift IDL.
*/
public int getValue() {
return value;
}
/**
* Find a the enum type by its integer value, as defined in the Thrift IDL.
* @return null if the value is not found.
*/
public static Suit findByValue(int value) {
switch (value) {
case 0:
return SPADES;
case 1:
return HEARTS;
case 2:
return DIAMONDS;
case 3:
return CLUBS;
default:
return null;
}
}
}

sql/core/src/test/scala/org/apache/spark/sql/parquet/ParquetThriftCompatibilitySuite.scala

@@ -17,16 +17,6 @@
 package org.apache.spark.sql.parquet

-import java.nio.ByteBuffer
-import java.util.{List => JList, Map => JMap}
-
-import scala.collection.JavaConversions._
-
-import org.apache.hadoop.fs.Path
-import org.apache.parquet.hadoop.metadata.CompressionCodecName
-import org.apache.parquet.thrift.ThriftParquetWriter
-
-import org.apache.spark.sql.parquet.test.thrift.{Nested, ParquetThriftCompat, Suit}
 import org.apache.spark.sql.test.TestSQLContext
 import org.apache.spark.sql.{Row, SQLContext}
@@ -35,28 +25,20 @@ class ParquetThriftCompatibilitySuite extends ParquetCompatibilityTest {
   override val sqlContext: SQLContext = TestSQLContext

-  override protected def beforeAll(): Unit = {
-    super.beforeAll()
-
-    val writer =
-      new ThriftParquetWriter[ParquetThriftCompat](
-        new Path(parquetStore.getCanonicalPath),
-        classOf[ParquetThriftCompat],
-        CompressionCodecName.SNAPPY)
-
-    (0 until 10).foreach(i => writer.write(makeParquetThriftCompat(i)))
-    writer.close()
-  }
+  private val parquetFilePath =
+    Thread.currentThread().getContextClassLoader.getResource("parquet-thrift-compat.snappy.parquet")

   test("Read Parquet file generated by parquet-thrift") {
     logInfo(
       s"""Schema of the Parquet file written by parquet-thrift:
-         |${readParquetSchema(parquetStore.getCanonicalPath)}
+         |${readParquetSchema(parquetFilePath.toString)}
       """.stripMargin)

-    checkAnswer(sqlContext.read.parquet(parquetStore.getCanonicalPath), (0 until 10).map { i =>
+    checkAnswer(sqlContext.read.parquet(parquetFilePath.toString), (0 until 10).map { i =>
       def nullable[T <: AnyRef]: ( => T) => T = makeNullable[T](i)
+
+      val suits = Array("SPADES", "HEARTS", "DIAMONDS", "CLUBS")

       Row(
         i % 2 == 0,
         i.toByte,
@@ -70,7 +52,7 @@ class ParquetThriftCompatibilitySuite extends ParquetCompatibilityTest {
         s"val_$i",
         s"val_$i",
         // Thrift ENUM values are converted to Parquet binaries containing UTF-8 strings
-        Suit.values()(i % 4).name(),
+        suits(i % 4),

         nullable(i % 2 == 0: java.lang.Boolean),
         nullable(i.toByte: java.lang.Byte),
@@ -80,7 +62,7 @@ class ParquetThriftCompatibilitySuite extends ParquetCompatibilityTest {
         nullable(i.toDouble + 0.2d: java.lang.Double),
         nullable(s"val_$i"),
         nullable(s"val_$i"),
-        nullable(Suit.values()(i % 4).name()),
+        nullable(suits(i % 4)),

         Seq.tabulate(3)(n => s"arr_${i + n}"),
         // Thrift `SET`s are converted to Parquet `LIST`s
@@ -93,48 +75,4 @@ class ParquetThriftCompatibilitySuite extends ParquetCompatibilityTest {
       }.toMap)
     })
   }
-
-  def makeParquetThriftCompat(i: Int): ParquetThriftCompat = {
-    def makeComplexColumn(i: Int): JMap[Integer, JList[Nested]] = {
-      mapAsJavaMap(Seq.tabulate(3) { n =>
-        (i + n: Integer) -> seqAsJavaList(Seq.tabulate(3) { m =>
-          new Nested(
-            seqAsJavaList(Seq.tabulate(3)(j => i + j + m)),
-            s"val_${i + m}")
-        })
-      }.toMap)
-    }
-
-    val value =
-      new ParquetThriftCompat(
-        i % 2 == 0,
-        i.toByte,
-        (i + 1).toShort,
-        i + 2,
-        i.toLong * 10,
-        i.toDouble + 0.2d,
-        ByteBuffer.wrap(s"val_$i".getBytes),
-        s"val_$i",
-        Suit.values()(i % 4),
-        seqAsJavaList(Seq.tabulate(3)(n => s"arr_${i + n}")),
-        setAsJavaSet(Set(i)),
-        mapAsJavaMap(Seq.tabulate(3)(n => (i + n: Integer) -> s"val_${i + n}").toMap),
-        makeComplexColumn(i))
-
-    if (i % 3 == 0) {
-      value
-    } else {
-      value
-        .setMaybeBoolColumn(i % 2 == 0)
-        .setMaybeByteColumn(i.toByte)
-        .setMaybeShortColumn((i + 1).toShort)
-        .setMaybeIntColumn(i + 2)
-        .setMaybeLongColumn(i.toLong * 10)
-        .setMaybeDoubleColumn(i.toDouble + 0.2d)
-        .setMaybeBinaryColumn(ByteBuffer.wrap(s"val_$i".getBytes))
-        .setMaybeStringColumn(s"val_$i")
-        .setMaybeEnumColumn(Suit.values()(i % 4))
-    }
-  }
 }
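Since the binary fixture is harder to maintain than generated data, the writer code removed above doubles as a recipe for regenerating `parquet-thrift-compat.snappy.parquet` offline. A sketch assembled from the deleted code; it uses the removed `makeParquetThriftCompat` helper shown above and requires `parquet-thrift` and `libthrift` (the very dependencies this commit removes) on the classpath, so it must run outside the Spark build:

```scala
import org.apache.hadoop.fs.Path
import org.apache.parquet.hadoop.metadata.CompressionCodecName
import org.apache.parquet.thrift.ThriftParquetWriter
import org.apache.spark.sql.parquet.test.thrift.ParquetThriftCompat

// Write ten ParquetThriftCompat records with Snappy compression, exactly as
// the removed beforeAll() did, but to a standalone fixture file.
val writer = new ThriftParquetWriter[ParquetThriftCompat](
  new Path("parquet-thrift-compat.snappy.parquet"),
  classOf[ParquetThriftCompat],
  CompressionCodecName.SNAPPY)

(0 until 10).foreach(i => writer.write(makeParquetThriftCompat(i)))
writer.close()
```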