[SPARK-16371][SQL] Two follow-up tasks

## What changes were proposed in this pull request?
This is a small follow-up for SPARK-16371:

1. Hide removeMetadata from public API.
2. Add JIRA ticket number to test case name.

## How was this patch tested?
Updated a test comment.

Author: Reynold Xin <rxin@databricks.com>

Closes #14074 from rxin/parquet-filter.
This commit is contained in:
Reynold Xin 2016-07-06 15:04:37 -07:00
parent 9c041990cf
commit 8e3e4ed6c0
2 changed files with 3 additions and 3 deletions

View file

@@ -413,10 +413,10 @@ object StructType extends AbstractDataType {
StructType(fields.asScala)
}
-  protected[sql] def fromAttributes(attributes: Seq[Attribute]): StructType =
+  private[sql] def fromAttributes(attributes: Seq[Attribute]): StructType =
StructType(attributes.map(a => StructField(a.name, a.dataType, a.nullable, a.metadata)))
-  def removeMetadata(key: String, dt: DataType): DataType =
+  private[sql] def removeMetadata(key: String, dt: DataType): DataType =
dt match {
case StructType(fields) =>
val newFields = fields.map { f =>

View file

@@ -544,7 +544,7 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContex
}
}
-  test("Do not push down filters incorrectly when inner name and outer name are the same") {
+  test("SPARK-16371 Do not push down filters when inner name and outer name are the same") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Tuple1(i)))) { implicit df =>
// Here the schema becomes as below:
//