[SPARK-2817] [SQL] add "show create table" support
In spark sql component, the "show create table" syntax had been disabled. We thought it is a useful function to describe a hive table. Author: tianyi <tianyi@asiainfo-linkage.com> Author: tianyi <tianyi@asiainfo.com> Author: tianyi <tianyi.asiainfo@gmail.com> Closes #1760 from tianyi/spark-2817 and squashes the following commits: 7d28b15 [tianyi] [SPARK-2817] fix too short prefix problem cbffe8b [tianyi] [SPARK-2817] fix the case problem 565ec14 [tianyi] [SPARK-2817] fix the case problem 60d48a9 [tianyi] [SPARK-2817] use system temporary folder instead of temporary files in the source tree, and also clean some empty line dbe1031 [tianyi] [SPARK-2817] move some code out of function rewritePaths, as it may be called multiple times 9b2ba11 [tianyi] [SPARK-2817] fix the line length problem 9f97586 [tianyi] [SPARK-2817] remove test.tmp.dir from pom.xml bfc2999 [tianyi] [SPARK-2817] add "File.separator" support, create a "testTmpDir" outside the rewritePaths bde800a [tianyi] [SPARK-2817] add "${system:test.tmp.dir}" support add "last_modified_by" to nonDeterministicLineIndicators in HiveComparisonTest bb82726 [tianyi] [SPARK-2817] remove test which requires a system from the whitelist. bbf6b42 [tianyi] [SPARK-2817] add a systemProperties named "test.tmp.dir" to pass the test which contains "${system:test.tmp.dir}" a337bd6 [tianyi] [SPARK-2817] add "show create table" support a03db77 [tianyi] [SPARK-2817] add "show create table" support
This commit is contained in:
parent
bdc7a1a474
commit
13f54e2b97
|
@ -635,6 +635,14 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
|
|||
"serde_regex",
|
||||
"serde_reported_schema",
|
||||
"set_variable_sub",
|
||||
"show_create_table_partitioned",
|
||||
"show_create_table_delimited",
|
||||
"show_create_table_alter",
|
||||
"show_create_table_view",
|
||||
"show_create_table_serde",
|
||||
"show_create_table_db_table",
|
||||
"show_create_table_does_not_exist",
|
||||
"show_create_table_index",
|
||||
"show_describe_func_quotes",
|
||||
"show_functions",
|
||||
"show_partitions",
|
||||
|
|
|
@ -53,6 +53,7 @@ private[hive] object HiveQl {
|
|||
protected val nativeCommands = Seq(
|
||||
"TOK_DESCFUNCTION",
|
||||
"TOK_DESCDATABASE",
|
||||
"TOK_SHOW_CREATETABLE",
|
||||
"TOK_SHOW_TABLESTATUS",
|
||||
"TOK_SHOWDATABASES",
|
||||
"TOK_SHOWFUNCTIONS",
|
||||
|
|
|
@ -70,6 +70,13 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
|
|||
setConf("hive.metastore.warehouse.dir", warehousePath)
|
||||
}
|
||||
|
||||
val testTempDir = File.createTempFile("testTempFiles", "spark.hive.tmp")
|
||||
testTempDir.delete()
|
||||
testTempDir.mkdir()
|
||||
|
||||
// For some hive test case which contain ${system:test.tmp.dir}
|
||||
System.setProperty("test.tmp.dir", testTempDir.getCanonicalPath)
|
||||
|
||||
configure() // Must be called before initializing the catalog below.
|
||||
|
||||
/** The location of the compiled hive distribution */
|
||||
|
@ -109,6 +116,7 @@ class TestHiveContext(sc: SparkContext) extends HiveContext(sc) {
|
|||
hiveFilesTemp.mkdir()
|
||||
hiveFilesTemp.deleteOnExit()
|
||||
|
||||
|
||||
val inRepoTests = if (System.getProperty("user.dir").endsWith("sql" + File.separator + "hive")) {
|
||||
new File("src" + File.separator + "test" + File.separator + "resources" + File.separator)
|
||||
} else {
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key smallint,
|
||||
value float)
|
||||
CLUSTERED BY (
|
||||
key)
|
||||
SORTED BY (
|
||||
value DESC)
|
||||
INTO 5 BUCKETS
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132100')
|
|
@ -0,0 +1,22 @@
|
|||
CREATE TABLE tmp_showcrt1(
|
||||
key smallint,
|
||||
value float)
|
||||
COMMENT 'temporary table'
|
||||
CLUSTERED BY (
|
||||
key)
|
||||
SORTED BY (
|
||||
value DESC)
|
||||
INTO 5 BUCKETS
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'EXTERNAL'='FALSE',
|
||||
'last_modified_by'='tianyi',
|
||||
'last_modified_time'='1407132100',
|
||||
'transient_lastDdlTime'='1407132100')
|
|
@ -0,0 +1,21 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key smallint,
|
||||
value float)
|
||||
COMMENT 'changed comment'
|
||||
CLUSTERED BY (
|
||||
key)
|
||||
SORTED BY (
|
||||
value DESC)
|
||||
INTO 5 BUCKETS
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'last_modified_by'='tianyi',
|
||||
'last_modified_time'='1407132100',
|
||||
'transient_lastDdlTime'='1407132100')
|
|
@ -0,0 +1,21 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key smallint,
|
||||
value float)
|
||||
COMMENT 'changed comment'
|
||||
CLUSTERED BY (
|
||||
key)
|
||||
SORTED BY (
|
||||
value DESC)
|
||||
INTO 5 BUCKETS
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'last_modified_by'='tianyi',
|
||||
'last_modified_time'='1407132101',
|
||||
'transient_lastDdlTime'='1407132101')
|
|
@ -0,0 +1,21 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key smallint,
|
||||
value float)
|
||||
COMMENT 'changed comment'
|
||||
CLUSTERED BY (
|
||||
key)
|
||||
SORTED BY (
|
||||
value DESC)
|
||||
INTO 5 BUCKETS
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED BY
|
||||
'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
|
||||
WITH SERDEPROPERTIES (
|
||||
'serialization.format'='1')
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'last_modified_by'='tianyi',
|
||||
'last_modified_time'='1407132101',
|
||||
'transient_lastDdlTime'='1407132101')
|
|
@ -0,0 +1,2 @@
|
|||
default
|
||||
tmp_feng
|
|
@ -0,0 +1,13 @@
|
|||
CREATE TABLE tmp_feng.tmp_showcrt(
|
||||
key string,
|
||||
value int)
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_feng.db/tmp_showcrt'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132107')
|
|
@ -0,0 +1,17 @@
|
|||
CREATE TABLE tmp_showcrt1(
|
||||
key int,
|
||||
value string,
|
||||
newvalue bigint)
|
||||
ROW FORMAT DELIMITED
|
||||
FIELDS TERMINATED BY ','
|
||||
COLLECTION ITEMS TERMINATED BY '|'
|
||||
MAP KEYS TERMINATED BY '%'
|
||||
LINES TERMINATED BY '\n'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132730')
|
|
@ -0,0 +1,16 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key string,
|
||||
newvalue boolean COMMENT 'a new value')
|
||||
COMMENT 'temporary table'
|
||||
PARTITIONED BY (
|
||||
value bigint COMMENT 'some value')
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.mapred.TextInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132112')
|
|
@ -0,0 +1,15 @@
|
|||
CREATE TABLE tmp_showcrt1(
|
||||
key int,
|
||||
value string,
|
||||
newvalue bigint)
|
||||
COMMENT 'temporary table'
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
|
||||
STORED AS INPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
|
||||
OUTPUTFORMAT
|
||||
'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132115')
|
|
@ -0,0 +1,14 @@
|
|||
CREATE EXTERNAL TABLE tmp_showcrt1(
|
||||
key string,
|
||||
value boolean)
|
||||
ROW FORMAT SERDE
|
||||
'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
|
||||
STORED BY
|
||||
'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
|
||||
WITH SERDEPROPERTIES (
|
||||
'serialization.format'='$',
|
||||
'field.delim'=',')
|
||||
LOCATION
|
||||
'file:/tmp/sparkHiveWarehouse1280221975983654134/tmp_showcrt1'
|
||||
TBLPROPERTIES (
|
||||
'transient_lastDdlTime'='1407132115')
|
|
@ -0,0 +1 @@
|
|||
CREATE VIEW tmp_copy_src AS SELECT `src`.`key`, `src`.`value` FROM `default`.`src`
|
|
@ -161,6 +161,7 @@ abstract class HiveComparisonTest
|
|||
"transient_lastDdlTime",
|
||||
"grantTime",
|
||||
"lastUpdateTime",
|
||||
"last_modified_by",
|
||||
"last_modified_time",
|
||||
"Owner:",
|
||||
// The following are hive specific schema parameters which we do not need to match exactly.
|
||||
|
|
Loading…
Reference in a new issue