[SPARK-4662] [SQL] Whitelist more unit tests

Whitelist more Hive unit tests:

"create_like_tbl_props"
"udf5"
"udf_java_method"
"decimal_1"
"udf_pmod"
"udf_to_double"
"udf_to_float"
"udf7" (this will fail in Hive 0.12)

Author: Cheng Hao <hao.cheng@intel.com>

Closes #3522 from chenghao-intel/unittest and squashes the following commits:

f54e4c7 [Cheng Hao] work around to clean up the hive.table.parameters.default in reset
16fee22 [Cheng Hao] Whitelist more unittest
This commit is contained in:
Cheng Hao 2014-12-11 22:43:02 -08:00 committed by Michael Armbrust
parent bf40cf89e3
commit a7f07f511c
110 changed files with 245 additions and 8 deletions

View file

@ -89,7 +89,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"authorization_5",
"keyword_1",
"misc_json",
"create_like_tbl_props",
"load_overwrite",
"alter_table_serde2",
"alter_table_not_sorted",
@ -100,8 +99,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"protectmode2",
//"describe_table",
"describe_comment_nonascii",
"udf5",
"udf_java_method",
"create_merge_compressed",
"create_view_partitioned",
"database_location",
@ -221,16 +219,11 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"orc_predicate_pushdown",
// Requires precision decimal support:
"decimal_1",
"udf_pmod",
"udf_when",
"udf_case",
"udf_to_double",
"udf_to_float",
// Needs constant object inspectors
"udf_round",
"udf7",
// Sort with Limit clause causes failure.
"ctas",
@ -351,6 +344,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"count",
"cp_mj_rc",
"create_insert_outputformat",
"create_like_tbl_props",
"create_like_view",
"create_nested_type",
"create_skewed_table1",
@ -785,6 +779,7 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"udaf_covar_samp",
"udaf_histogram_numeric",
"udf2",
"udf5",
"udf6",
"udf7",
"udf8",

View file

@ -426,6 +426,8 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
// other sql exec here.
runSqlHive("RESET")
// For some reason, RESET does not reset the following variables...
// https://issues.apache.org/jira/browse/HIVE-9004
runSqlHive("set hive.table.parameters.default=")
runSqlHive("set datanucleus.cache.collections=true")
runSqlHive("set datanucleus.cache.collections.lazy=true")
// Lots of tests fail if we do not change the partition whitelist from the default.

View file

@ -0,0 +1,28 @@
# col_name data_type comment
key int
value string
# Detailed Table Information
Database: default
Owner: hcheng
CreateTime: Fri Nov 28 00:04:15 PST 2014
LastAccessTime: UNKNOWN
Protect Mode: None
Retention: 0
Location: file:/tmp/sparkHiveWarehouse3490012261419180285/test_table
Table Type: MANAGED_TABLE
Table Parameters:
key value
transient_lastDdlTime 1417161855
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1

View file

@ -0,0 +1,29 @@
# col_name data_type comment
key int
value string
# Detailed Table Information
Database: default
Owner: hcheng
CreateTime: Fri Nov 28 00:04:16 PST 2014
LastAccessTime: UNKNOWN
Protect Mode: None
Retention: 0
Location: file:/tmp/sparkHiveWarehouse3490012261419180285/test_table4
Table Type: MANAGED_TABLE
Table Parameters:
key value
key1 value1
transient_lastDdlTime 1417161856
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1

View file

@ -0,0 +1,28 @@
# col_name data_type comment
key int
value string
# Detailed Table Information
Database: default
Owner: hcheng
CreateTime: Fri Nov 28 00:04:15 PST 2014
LastAccessTime: UNKNOWN
Protect Mode: None
Retention: 0
Location: file:/tmp/sparkHiveWarehouse3490012261419180285/test_table1
Table Type: MANAGED_TABLE
Table Parameters:
key1 value1
transient_lastDdlTime 1417161855
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1

View file

@ -0,0 +1,29 @@
# col_name data_type comment
key int
value string
# Detailed Table Information
Database: default
Owner: hcheng
CreateTime: Fri Nov 28 00:04:16 PST 2014
LastAccessTime: UNKNOWN
Protect Mode: None
Retention: 0
Location: file:/tmp/sparkHiveWarehouse3490012261419180285/test_table2
Table Type: MANAGED_TABLE
Table Parameters:
key1 value1
key2 value2
transient_lastDdlTime 1417161856
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1

View file

@ -0,0 +1,29 @@
# col_name data_type comment
key int
value string
# Detailed Table Information
Database: default
Owner: hcheng
CreateTime: Fri Nov 28 00:04:16 PST 2014
LastAccessTime: UNKNOWN
Protect Mode: None
Retention: 0
Location: file:/tmp/sparkHiveWarehouse3490012261419180285/test_table3
Table Type: MANAGED_TABLE
Table Parameters:
key1 value1
key2 value3
transient_lastDdlTime 1417161856
# Storage Information
SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
InputFormat: org.apache.hadoop.mapred.TextInputFormat
OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
Sort Columns: []
Storage Desc Params:
serialization.format 1

View file

@ -0,0 +1 @@
1969-12-31 16:00:17.29

View file

@ -0,0 +1,3 @@
t decimal(4,2)
u decimal(5,0)
v decimal(10,0)

View file

@ -0,0 +1 @@
2008-11-11 15:32:20 2008-11-11 1 11 2008 1 11 2008

View file

@ -0,0 +1 @@
01/13/10 11:57:40 2010-01-13 11:57:40

View file

@ -0,0 +1 @@
java_method(class,method[,arg1[,arg2..]]) calls method with reflection

View file

@ -0,0 +1 @@
java_method(class,method[,arg1[,arg2..]]) calls method with reflection

View file

@ -0,0 +1,4 @@
java_method(class,method[,arg1[,arg2..]]) calls method with reflection
Synonyms: reflect
Use this UDF to call Java methods by matching the argument signature

View file

@ -0,0 +1,4 @@
java_method(class,method[,arg1[,arg2..]]) calls method with reflection
Synonyms: reflect
Use this UDF to call Java methods by matching the argument signature

View file

@ -0,0 +1 @@
1 true 3 2 3 2.718281828459045 1.0

View file

@ -0,0 +1 @@
1 true 3 2 3 2.718281828459045 1.0

View file

@ -0,0 +1 @@
a pmod b - Compute the positive modulo

View file

@ -0,0 +1 @@
a pmod b - Compute the positive modulo

View file

@ -0,0 +1 @@
a pmod b - Compute the positive modulo

View file

@ -0,0 +1 @@
6.89 51.7 18.09

View file

@ -0,0 +1 @@
6.890000000000011 51.699999999999996 18.090000000000003

View file

@ -0,0 +1 @@
6.89 51.7 18.09

View file

@ -0,0 +1 @@
a pmod b - Compute the positive modulo

View file

@ -0,0 +1 @@
6.8899984 51.700005 18.089996

View file

@ -0,0 +1 @@
6.8899984 51.700005 18.089996

View file

@ -0,0 +1 @@
6.890000000000011 51.699999999999996 18.090000000000003

View file

@ -0,0 +1 @@
round(x[, d]) - round x to d decimal places

View file

@ -0,0 +1,4 @@
round(x[, d]) - round x to d decimal places
Example:
> SELECT round(12.3456, 1) FROM src LIMIT 1;
12.3'

View file

@ -0,0 +1 @@
-3.140000104904175

View file

@ -0,0 +1 @@
-3.140000104904175

Some files were not shown because too many files have changed in this diff. Show more