hive-commits mailing list archives

From: pxi...@apache.org
Subject: [01/19] hive git commit: HIVE-15591: Hive can not use , in quoted column name (Pengcheng Xiong, reviewed by Ashutosh Chauhan)
Date: Mon, 23 Jan 2017 18:38:19 GMT
Repository: hive
Updated Branches:
  refs/heads/master 38c3f1a58 -> 588c3911a
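
Every SerDe touched in this patch applies the same pattern: read the new column.name.delimiter table property when it is set, fall back to a comma otherwise, and split the columns list on that delimiter instead of a hard-coded ",". Below is a minimal standalone sketch of that lookup; the class and method names are illustrative only, while the property keys and the comma default are taken from the diff.

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import java.util.Properties;

    public class ColumnNameDelimiterSketch {
      // Mirrors serdeConstants.LIST_COLUMNS / COLUMN_NAME_DELIMITER and SerDeUtils.COMMA
      // from the patch; this class itself is not part of the commit.
      static final String LIST_COLUMNS = "columns";
      static final String COLUMN_NAME_DELIMITER = "column.name.delimiter";
      static final char COMMA = ',';

      static List<String> readColumnNames(Properties tbl) {
        // Use the configured delimiter when the table property is present, else default to ",".
        String delimiter = tbl.containsKey(COLUMN_NAME_DELIMITER)
            ? tbl.getProperty(COLUMN_NAME_DELIMITER)
            : String.valueOf(COMMA);
        String columnNameProperty = tbl.getProperty(LIST_COLUMNS, "");
        return columnNameProperty.isEmpty()
            ? Collections.<String>emptyList()
            : Arrays.asList(columnNameProperty.split(delimiter));
      }

      public static void main(String[] args) {
        Properties tbl = new Properties();
        // A column literally named "a,b" survives only if the delimiter is something other
        // than a comma, e.g. '\0' as suggested by the SerDeUtils comment in this patch.
        tbl.setProperty(LIST_COLUMNS, "a,b\0c\0d");
        tbl.setProperty(COLUMN_NAME_DELIMITER, "\0");
        System.out.println(readColumnNames(tbl)); // prints [a,b, c, d]
      }
    }

Using '\0' as the delimiter keeps commas inside quoted column names intact, while defaulting to "," preserves backward compatibility with existing comma-delimited table metadata.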


http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/udtf_explode.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udtf_explode.q.out b/ql/src/test/results/clientpositive/udtf_explode.q.out
index 8f68b35..2b19296 100644
--- a/ql/src/test/results/clientpositive/udtf_explode.q.out
+++ b/ql/src/test/results/clientpositive/udtf_explode.q.out
@@ -91,6 +91,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -111,6 +112,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -153,6 +155,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     properties:
+                      column.name.delimiter ,
                       columns _col0,_col1
                       columns.types int,bigint
                       escape.delim \
@@ -185,6 +188,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1
               columns.types int,bigint
               escape.delim \
@@ -194,6 +198,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1
                 columns.types int,bigint
                 escape.delim \
@@ -351,6 +356,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -371,6 +377,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -413,6 +420,7 @@ STAGE PLANS:
                     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                     properties:
+                      column.name.delimiter ,
                       columns _col0,_col1,_col2
                       columns.types int,string,bigint
                       escape.delim \
@@ -445,6 +453,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1,_col2
               columns.types int,string,bigint
               escape.delim \
@@ -454,6 +463,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1,_col2
                 columns.types int,string,bigint
                 escape.delim \

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/union22.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union22.q.out b/ql/src/test/results/clientpositive/union22.q.out
index 9e44492..f60dbf6 100644
--- a/ql/src/test/results/clientpositive/union22.q.out
+++ b/ql/src/test/results/clientpositive/union22.q.out
@@ -99,6 +99,7 @@ STAGE PLANS:
                   properties:
                     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                     bucket_count -1
+                    column.name.delimiter ,
                     columns k0,k1,k2,k3,k4,k5
                     columns.comments 
                     columns.types string:string:string:string:string:string
@@ -120,6 +121,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
                       bucket_count -1
+                      column.name.delimiter ,
                       columns k0,k1,k2,k3,k4,k5
                       columns.comments 
                       columns.types string:string:string:string:string:string
@@ -201,6 +203,7 @@ STAGE PLANS:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                           properties:
+                            column.name.delimiter ,
                             columns _col0,_col1,_col2,_col3
                             columns.types string,string,string,string
                             escape.delim \
@@ -224,6 +227,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns k1,k2,k3,k4
               columns.comments 
               columns.types string:string:string:string
@@ -245,6 +249,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k1,k2,k3,k4
                 columns.comments 
                 columns.types string:string:string:string
@@ -269,6 +274,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns k0,k1,k2,k3,k4,k5
               columns.comments 
               columns.types string:string:string:string:string:string
@@ -290,6 +296,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k0,k1,k2,k3,k4,k5
                 columns.comments 
                 columns.types string:string:string:string:string:string
@@ -337,6 +344,7 @@ STAGE PLANS:
                         output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                         properties:
                           bucket_count -1
+                          column.name.delimiter ,
                           columns k1,k2,k3,k4
                           columns.comments 
                           columns.types string:string:string:string
@@ -370,6 +378,7 @@ STAGE PLANS:
                     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                     properties:
                       bucket_count -1
+                      column.name.delimiter ,
                       columns k1,k2,k3,k4
                       columns.comments 
                       columns.types string:string:string:string
@@ -395,6 +404,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1,_col2,_col3
               columns.types string,string,string,string
               escape.delim \
@@ -404,6 +414,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1,_col2,_col3
                 columns.types string,string,string,string
                 escape.delim \
@@ -419,6 +430,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns k0,k1,k2,k3,k4,k5
               columns.comments 
               columns.types string:string:string:string:string:string
@@ -440,6 +452,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k0,k1,k2,k3,k4,k5
                 columns.comments 
                 columns.types string:string:string:string:string:string
@@ -470,6 +483,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k1,k2,k3,k4
                 columns.comments 
                 columns.types string:string:string:string
@@ -546,6 +560,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns k1,k2,k3,k4
               columns.comments 
               columns.types string:string:string:string
@@ -567,6 +582,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k1,k2,k3,k4
                 columns.comments 
                 columns.types string:string:string:string
@@ -591,6 +607,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns k0,k1,k2,k3,k4,k5
               columns.comments 
               columns.types string:string:string:string:string:string
@@ -612,6 +629,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns k0,k1,k2,k3,k4,k5
                 columns.comments 
                 columns.types string:string:string:string:string:string
@@ -657,6 +675,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
+                    column.name.delimiter ,
                     columns _col0,_col1,_col2,_col3
                     columns.types string,string,string,string
                     escape.delim \

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/union24.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union24.q.out b/ql/src/test/results/clientpositive/union24.q.out
index ef3a224..d6b1a79 100644
--- a/ql/src/test/results/clientpositive/union24.q.out
+++ b/ql/src/test/results/clientpositive/union24.q.out
@@ -115,6 +115,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -135,6 +136,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -170,6 +172,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 properties:
+                  column.name.delimiter ,
                   columns _col0,_col1
                   columns.types string,bigint
                   escape.delim \
@@ -325,6 +328,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1
               columns.types string,bigint
               escape.delim \
@@ -334,6 +338,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
@@ -347,6 +352,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -367,6 +373,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -391,6 +398,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -411,6 +419,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -435,6 +444,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -455,6 +465,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -616,6 +627,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -636,6 +648,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -660,6 +673,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -680,6 +694,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -722,6 +737,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
+                    column.name.delimiter ,
                     columns _col0,_col1
                     columns.types string,bigint
                     escape.delim \
@@ -841,6 +857,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1
               columns.types string,bigint
               escape.delim \
@@ -850,6 +867,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
@@ -863,6 +881,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -883,6 +902,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -907,6 +927,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -927,6 +948,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -1077,6 +1099,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -1097,6 +1120,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -1121,6 +1145,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -1141,6 +1166,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -1185,6 +1211,7 @@ STAGE PLANS:
                   input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                   output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                   properties:
+                    column.name.delimiter ,
                     columns _col0,_col1
                     columns.types string,bigint
                     escape.delim \
@@ -1217,6 +1244,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1
               columns.types string,bigint
               escape.delim \
@@ -1226,6 +1254,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
@@ -1250,6 +1279,7 @@ STAGE PLANS:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
                 properties:
+                  column.name.delimiter ,
                   columns _col0,_col1
                   columns.types string,bigint
                   escape.delim \
@@ -1369,6 +1399,7 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
+              column.name.delimiter ,
               columns _col0,_col1
               columns.types string,bigint
               escape.delim \
@@ -1378,6 +1409,7 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
+                column.name.delimiter ,
                 columns _col0,_col1
                 columns.types string,bigint
                 escape.delim \
@@ -1391,6 +1423,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -1411,6 +1444,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint
@@ -1435,6 +1469,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
               bucket_count -1
+              column.name.delimiter ,
               columns key,count
               columns.comments 
               columns.types string:bigint
@@ -1455,6 +1490,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,count
                 columns.comments 
                 columns.types string:bigint

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/union_ppr.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_ppr.q.out b/ql/src/test/results/clientpositive/union_ppr.q.out
index 99d2d3b..8633c12 100644
--- a/ql/src/test/results/clientpositive/union_ppr.q.out
+++ b/ql/src/test/results/clientpositive/union_ppr.q.out
@@ -88,6 +88,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -109,6 +110,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
@@ -134,6 +136,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -155,6 +158,7 @@ STAGE PLANS:
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/ql/src/test/results/clientpositive/union_stats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/union_stats.q.out b/ql/src/test/results/clientpositive/union_stats.q.out
index ef8d9b5..9ea0b51 100644
--- a/ql/src/test/results/clientpositive/union_stats.q.out
+++ b/ql/src/test/results/clientpositive/union_stats.q.out
@@ -90,6 +90,7 @@ STAGE PLANS:
             properties:
               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
               bucket_count -1
+              column.name.delimiter ,
               columns key,value
               columns.comments 'default','default'
               columns.types string:string
@@ -110,6 +111,7 @@ STAGE PLANS:
               properties:
                 COLUMN_STATS_ACCURATE {"BASIC_STATS":"true","COLUMN_STATS":{"key":"true","value":"true"}}
                 bucket_count -1
+                column.name.delimiter ,
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/if/serde.thrift
----------------------------------------------------------------------
diff --git a/serde/if/serde.thrift b/serde/if/serde.thrift
index 6caad36..1d40d5a 100644
--- a/serde/if/serde.thrift
+++ b/serde/if/serde.thrift
@@ -74,6 +74,7 @@ const string LIST_COLUMNS = "columns";
 const string LIST_COLUMN_TYPES = "columns.types";
 
 const string TIMESTAMP_FORMATS = "timestamp.formats";
+const string COLUMN_NAME_DELIMITER = "column.name.delimiter";
 
 const set<string> PrimitiveTypes  = [
   VOID_TYPE_NAME

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
index 3a675bf..907acf2 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.cpp
@@ -99,6 +99,8 @@ serdeConstants::serdeConstants() {
 
   TIMESTAMP_FORMATS = "timestamp.formats";
 
+  COLUMN_NAME_DELIMITER = "column.name.delimiter";
+
   PrimitiveTypes.insert("void");
   PrimitiveTypes.insert("boolean");
   PrimitiveTypes.insert("tinyint");

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-cpp/serde_constants.h
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-cpp/serde_constants.h b/serde/src/gen/thrift/gen-cpp/serde_constants.h
index a5f33fb..8785bd2 100644
--- a/serde/src/gen/thrift/gen-cpp/serde_constants.h
+++ b/serde/src/gen/thrift/gen-cpp/serde_constants.h
@@ -59,6 +59,7 @@ class serdeConstants {
   std::string LIST_COLUMNS;
   std::string LIST_COLUMN_TYPES;
   std::string TIMESTAMP_FORMATS;
+  std::string COLUMN_NAME_DELIMITER;
   std::set<std::string>  PrimitiveTypes;
   std::set<std::string>  CollectionTypes;
   std::set<std::string>  IntegralTypes;

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
index 04ed8f5..2578d3e 100644
--- a/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
+++ b/serde/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/serde/serdeConstants.java
@@ -124,6 +124,8 @@ public class serdeConstants {
 
   public static final String TIMESTAMP_FORMATS = "timestamp.formats";
 
+  public static final String COLUMN_NAME_DELIMITER = "column.name.delimiter";
+
   public static final Set<String> PrimitiveTypes = new HashSet<String>();
   static {
     PrimitiveTypes.add("void");

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
index 18c3991..ea2dbbe 100644
--- a/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
+++ b/serde/src/gen/thrift/gen-php/org/apache/hadoop/hive/serde/Types.php
@@ -62,6 +62,7 @@ final class Constant extends \Thrift\Type\TConstant {
   static protected $LIST_COLUMNS;
   static protected $LIST_COLUMN_TYPES;
   static protected $TIMESTAMP_FORMATS;
+  static protected $COLUMN_NAME_DELIMITER;
   static protected $PrimitiveTypes;
   static protected $CollectionTypes;
   static protected $IntegralTypes;
@@ -242,6 +243,10 @@ final class Constant extends \Thrift\Type\TConstant {
     return "timestamp.formats";
   }
 
+  static protected function init_COLUMN_NAME_DELIMITER() {
+    return "column.name.delimiter";
+  }
+
   static protected function init_PrimitiveTypes() {
     return array(
       "void" => true,

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
index fafdc24..e3b24eb 100644
--- a/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
+++ b/serde/src/gen/thrift/gen-py/org_apache_hadoop_hive_serde/constants.py
@@ -53,6 +53,7 @@ UNION_TYPE_NAME = "uniontype"
 LIST_COLUMNS = "columns"
 LIST_COLUMN_TYPES = "columns.types"
 TIMESTAMP_FORMATS = "timestamp.formats"
+COLUMN_NAME_DELIMITER = "column.name.delimiter"
 PrimitiveTypes = set([
   "void",
   "boolean",

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/gen/thrift/gen-rb/serde_constants.rb
----------------------------------------------------------------------
diff --git a/serde/src/gen/thrift/gen-rb/serde_constants.rb b/serde/src/gen/thrift/gen-rb/serde_constants.rb
index 0ce9f27..15efaea 100644
--- a/serde/src/gen/thrift/gen-rb/serde_constants.rb
+++ b/serde/src/gen/thrift/gen-rb/serde_constants.rb
@@ -95,6 +95,8 @@ LIST_COLUMN_TYPES = %q"columns.types"
 
 TIMESTAMP_FORMATS = %q"timestamp.formats"
 
+COLUMN_NAME_DELIMITER = %q"column.name.delimiter"
+
 PrimitiveTypes = Set.new([
   %q"void",
   %q"boolean",

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
index 551a9da..e49b6dc 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/MetadataTypedColumnsetSerDe.java
@@ -103,6 +103,8 @@ public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
         && serdeName.equals("org.apache.hadoop.hive.serde.thrift.columnsetSerDe")) {
       columnsetSerDe = true;
     }
+    final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
     if (columnProperty == null || columnProperty.length() == 0
         || columnsetSerDe) {
       // Hack for tables with no columns
@@ -111,7 +113,7 @@ public class MetadataTypedColumnsetSerDe extends AbstractSerDe {
           .getReflectionObjectInspector(ColumnSet.class,
           ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
     } else {
-      columnNames = Arrays.asList(columnProperty.split(","));
+      columnNames = Arrays.asList(columnProperty.split(columnNameDelimiter));
       cachedObjectInspector = MetadataListStructObjectInspector
           .getInstance(columnNames);
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
index 752b907..156b410 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/RegexSerDe.java
@@ -121,8 +121,9 @@ public class RegexSerDe extends AbstractSerDe {
           "This table does not have serde property \"input.regex\"!");
     }
 
-
-    List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+    final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
+    List<String> columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     columnTypes = TypeInfoUtils
         .getTypeInfosFromTypeString(columnTypeProperty);
     assert columnNames.size() == columnTypes.size();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
index 7ffc964..6802a05 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/SerDeUtils.java
@@ -63,7 +63,9 @@ public final class SerDeUtils {
   public static final char QUOTE = '"';
   public static final char COLON = ':';
   public static final char COMMA = ',';
-  public static final char COLUMN_COMMENTS_DELIMITER = '\0';
+  // we should use '\0' for COLUMN_NAME_DELIMITER if column name contains COMMA
+  // but we should also take care of the backward compatibility
+  public static char COLUMN_COMMENTS_DELIMITER = '\0';
   public static final String LBRACKET = "[";
   public static final String RBRACKET = "]";
   public static final String LBRACE = "{";

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
index 0be54e0..e5f2c5e 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerDe.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
@@ -96,14 +97,16 @@ public class AvroSerDe extends AbstractSerDe {
     final String columnNameProperty = properties.getProperty(serdeConstants.LIST_COLUMNS);
     final String columnTypeProperty = properties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     final String columnCommentProperty = properties.getProperty(LIST_COLUMN_COMMENTS,"");
-
+    final String columnNameDelimiter = properties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? properties
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
+        
     if (hasExternalSchema(properties)
         || columnNameProperty == null || columnNameProperty.isEmpty()
         || columnTypeProperty == null || columnTypeProperty.isEmpty()) {
       schema = determineSchemaOrReturnErrorSchema(configuration, properties);
     } else {
       // Get column names and sort order
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
       columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
 
       schema = getSchemaFromCols(properties, columnNames, columnTypes, columnCommentProperty);

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
index b6b23c2..f18585d 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/avro/AvroSerdeUtils.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.mapred.JobConf;
@@ -118,8 +119,10 @@ public class AvroSerdeUtils {
         || columnTypeProperty == null || columnTypeProperty.isEmpty() ) {
         throw new AvroSerdeException(EXCEPTION_MESSAGE);
       }
+      final String columnNameDelimiter = properties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? properties
+          .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
       // Get column names and types
-      List<String> columnNames = Arrays.asList(columnNameProperty.split(","));
+      List<String> columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
       List<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
 
       Schema schema = AvroSerDe.getSchemaFromCols(properties, columnNames, columnTypes, columnCommentProperty);

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
index 0a035c6..89e15c3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/binarysortable/BinarySortableSerDe.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -152,10 +153,12 @@ public class BinarySortableSerDe extends AbstractSerDe {
     // Get column names and sort order
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+    final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
index 7232d0b..ee4bb34 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySerDeParameters.java
@@ -31,6 +31,7 @@ import org.apache.hadoop.hive.common.classification.InterfaceStability.Stable;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyObjectInspectorParameters;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
@@ -139,9 +140,10 @@ public class LazySerDeParameters implements LazyObjectInspectorParameters {
     String columnTypeProperty = tableProperties.getProperty(serdeConstants.LIST_COLUMN_TYPES);
 
     // Parse the configuration parameters
-
+    String columnNameDelimiter = tableProperties.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tableProperties
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
     if (columnNameProperty != null && columnNameProperty.length() > 0) {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     } else {
       columnNames = new ArrayList<String>();
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
index 99abb5d..56b4ca3 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazybinary/LazyBinarySerDe.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -108,11 +109,13 @@ public class LazyBinarySerDe extends AbstractSerDe {
       throws SerDeException {
     // Get column names and types
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
+    String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
     String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();

http://git-wip-us.apache.org/repos/asf/hive/blob/588c3911/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java
index 5c31974..84ed6ba 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/thrift/ThriftJDBCBinarySerDe.java
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -73,10 +74,12 @@ public class ThriftJDBCBinarySerDe extends AbstractSerDe {
 	MAX_BUFFERED_ROWS = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_MAX_FETCH_SIZE);
     String columnNameProperty = tbl.getProperty(serdeConstants.LIST_COLUMNS);
     String columnTypeProperty = tbl.getProperty(serdeConstants.LIST_COLUMN_TYPES);
+    final String columnNameDelimiter = tbl.containsKey(serdeConstants.COLUMN_NAME_DELIMITER) ? tbl
+        .getProperty(serdeConstants.COLUMN_NAME_DELIMITER) : String.valueOf(SerDeUtils.COMMA);
     if (columnNameProperty.length() == 0) {
       columnNames = new ArrayList<String>();
     } else {
-      columnNames = Arrays.asList(columnNameProperty.split(","));
+      columnNames = Arrays.asList(columnNameProperty.split(columnNameDelimiter));
     }
     if (columnTypeProperty.length() == 0) {
       columnTypes = new ArrayList<TypeInfo>();

