hive-commits mailing list archives

From: gunt...@apache.org
Subject: [38/51] [partial] hive git commit: HIVE-15790: Remove unused beeline golden files (Gunther Hagleitner, reviewed by Sergey Shelukhin)
Date: Fri, 03 Feb 2017 21:50:51 GMT
http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/bucket2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/bucket2.q.out b/ql/src/test/results/beelinepositive/bucket2.q.out
deleted file mode 100644
index 4e1db53..0000000
--- a/ql/src/test/results/beelinepositive/bucket2.q.out
+++ /dev/null
@@ -1,477 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/bucket2.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/bucket2.q
->>>  set hive.enforce.bucketing = true;
-No rows affected 
->>>  set hive.exec.reducers.max = 1;
-No rows affected 
->>>  
->>>  CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;
-No rows affected 
->>>  
->>>  explain extended 
-insert overwrite table bucket2_1 
-select * from src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket2_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Reduce Output Operator'
-'                sort order: '
-'                Map-reduce partition columns:'
-'                      expr: UDFToInteger(_col0)'
-'                      type: int'
-'                tag: -1'
-'                value expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: string'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket2.db/src [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket2.db/src '
-'          Partition'
-'            base file name: src'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/bucket2.db/src'
-'              name bucket2.src'
-'              numFiles 1'
-'              numPartitions 0'
-'              numRows 0'
-'              rawDataSize 0'
-'              serialization.ddl struct src { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket2.db/src'
-'                name bucket2.src'
-'                numFiles 1'
-'                numPartitions 0'
-'                numRows 0'
-'                rawDataSize 0'
-'                serialization.ddl struct src { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 5812'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket2.src'
-'            name: bucket2.src'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: UDFToInteger(_col0)'
-'                  type: int'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 2'
-'              Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  properties:'
-'                    bucket_count 2'
-'                    bucket_field_name key'
-'                    columns key,value'
-'                    columns.types int:string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/bucket2.db/bucket2_1'
-'                    name bucket2.bucket2_1'
-'                    serialization.ddl struct bucket2_1 { i32 key, string value}'
-'                    serialization.format 1'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: bucket2.bucket2_1'
-'              TotalFiles: 2'
-'              GatherStats: true'
-'              MultiFileSpray: true'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count 2'
-'                bucket_field_name key'
-'                columns key,value'
-'                columns.types int:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket2.db/bucket2_1'
-'                name bucket2.bucket2_1'
-'                serialization.ddl struct bucket2_1 { i32 key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket2.bucket2_1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-150 rows selected 
->>>  
->>>  insert overwrite table bucket2_1 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  explain 
-select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket2_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        s '
-'          TableScan'
-'            alias: s'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (((hash(key) & 2147483647) % 2) = 0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: int'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                  sort order: +'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-50 rows selected 
->>>  
->>>  select * from bucket2_1 tablesample (bucket 1 out of 2) s order by key;
-'key','value'
-'0','val_0'
-'0','val_0'
-'0','val_0'
-'2','val_2'
-'4','val_4'
-'8','val_8'
-'10','val_10'
-'12','val_12'
-'12','val_12'
-'18','val_18'
-'18','val_18'
-'20','val_20'
-'24','val_24'
-'24','val_24'
-'26','val_26'
-'26','val_26'
-'28','val_28'
-'30','val_30'
-'34','val_34'
-'42','val_42'
-'42','val_42'
-'44','val_44'
-'54','val_54'
-'58','val_58'
-'58','val_58'
-'64','val_64'
-'66','val_66'
-'70','val_70'
-'70','val_70'
-'70','val_70'
-'72','val_72'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'76','val_76'
-'78','val_78'
-'80','val_80'
-'82','val_82'
-'84','val_84'
-'84','val_84'
-'86','val_86'
-'90','val_90'
-'90','val_90'
-'90','val_90'
-'92','val_92'
-'96','val_96'
-'98','val_98'
-'98','val_98'
-'100','val_100'
-'100','val_100'
-'104','val_104'
-'104','val_104'
-'114','val_114'
-'116','val_116'
-'118','val_118'
-'118','val_118'
-'120','val_120'
-'120','val_120'
-'126','val_126'
-'128','val_128'
-'128','val_128'
-'128','val_128'
-'134','val_134'
-'134','val_134'
-'136','val_136'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'146','val_146'
-'146','val_146'
-'150','val_150'
-'152','val_152'
-'152','val_152'
-'156','val_156'
-'158','val_158'
-'160','val_160'
-'162','val_162'
-'164','val_164'
-'164','val_164'
-'166','val_166'
-'168','val_168'
-'170','val_170'
-'172','val_172'
-'172','val_172'
-'174','val_174'
-'174','val_174'
-'176','val_176'
-'176','val_176'
-'178','val_178'
-'180','val_180'
-'186','val_186'
-'190','val_190'
-'192','val_192'
-'194','val_194'
-'196','val_196'
-'200','val_200'
-'200','val_200'
-'202','val_202'
-'208','val_208'
-'208','val_208'
-'208','val_208'
-'214','val_214'
-'216','val_216'
-'216','val_216'
-'218','val_218'
-'222','val_222'
-'224','val_224'
-'224','val_224'
-'226','val_226'
-'228','val_228'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'238','val_238'
-'238','val_238'
-'242','val_242'
-'242','val_242'
-'244','val_244'
-'248','val_248'
-'252','val_252'
-'256','val_256'
-'256','val_256'
-'258','val_258'
-'260','val_260'
-'262','val_262'
-'266','val_266'
-'272','val_272'
-'272','val_272'
-'274','val_274'
-'278','val_278'
-'278','val_278'
-'280','val_280'
-'280','val_280'
-'282','val_282'
-'282','val_282'
-'284','val_284'
-'286','val_286'
-'288','val_288'
-'288','val_288'
-'292','val_292'
-'296','val_296'
-'298','val_298'
-'298','val_298'
-'298','val_298'
-'302','val_302'
-'306','val_306'
-'308','val_308'
-'310','val_310'
-'316','val_316'
-'316','val_316'
-'316','val_316'
-'318','val_318'
-'318','val_318'
-'318','val_318'
-'322','val_322'
-'322','val_322'
-'332','val_332'
-'336','val_336'
-'338','val_338'
-'342','val_342'
-'342','val_342'
-'344','val_344'
-'344','val_344'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'356','val_356'
-'360','val_360'
-'362','val_362'
-'364','val_364'
-'366','val_366'
-'368','val_368'
-'374','val_374'
-'378','val_378'
-'382','val_382'
-'382','val_382'
-'384','val_384'
-'384','val_384'
-'384','val_384'
-'386','val_386'
-'392','val_392'
-'394','val_394'
-'396','val_396'
-'396','val_396'
-'396','val_396'
-'400','val_400'
-'402','val_402'
-'404','val_404'
-'404','val_404'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'414','val_414'
-'414','val_414'
-'418','val_418'
-'424','val_424'
-'424','val_424'
-'430','val_430'
-'430','val_430'
-'430','val_430'
-'432','val_432'
-'436','val_436'
-'438','val_438'
-'438','val_438'
-'438','val_438'
-'444','val_444'
-'446','val_446'
-'448','val_448'
-'452','val_452'
-'454','val_454'
-'454','val_454'
-'454','val_454'
-'458','val_458'
-'458','val_458'
-'460','val_460'
-'462','val_462'
-'462','val_462'
-'466','val_466'
-'466','val_466'
-'466','val_466'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'470','val_470'
-'472','val_472'
-'478','val_478'
-'478','val_478'
-'480','val_480'
-'480','val_480'
-'480','val_480'
-'482','val_482'
-'484','val_484'
-'490','val_490'
-'492','val_492'
-'492','val_492'
-'494','val_494'
-'496','val_496'
-'498','val_498'
-'498','val_498'
-'498','val_498'
-247 rows selected 
->>>  !record
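
The deleted bucket2.q golden file above exercised write-time bucket enforcement and bucket sampling. A minimal sketch of the pattern the test covered, drawn from the statements recorded in the file (src is the standard 500-row key/value test fixture):

set hive.enforce.bucketing = true;
set hive.exec.reducers.max = 1;

-- Two hash buckets on key: the insert is redistributed so each bucket lands in its own file.
CREATE TABLE bucket2_1(key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS;
INSERT OVERWRITE TABLE bucket2_1 SELECT * FROM src;

-- Sampling bucket 1 of 2 keeps only rows where (hash(key) & 2147483647) % 2 = 0,
-- exactly the predicate shown in the plan's Filter Operator; 247 of 500 rows survive.
SELECT * FROM bucket2_1 TABLESAMPLE (BUCKET 1 OUT OF 2) s ORDER BY key;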

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/bucket3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/bucket3.q.out b/ql/src/test/results/beelinepositive/bucket3.q.out
deleted file mode 100644
index 3bcc675..0000000
--- a/ql/src/test/results/beelinepositive/bucket3.q.out
+++ /dev/null
@@ -1,492 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/bucket3.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/bucket3.q
->>>  set hive.enforce.bucketing = true;
-No rows affected 
->>>  set hive.exec.reducers.max = 1;
-No rows affected 
->>>  
->>>  CREATE TABLE bucket3_1(key int, value string) partitioned by (ds string) CLUSTERED BY (key) INTO 2 BUCKETS;
-No rows affected 
->>>  
->>>  explain extended 
-insert overwrite table bucket3_1 partition (ds='1') 
-select * from src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket3_1) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Reduce Output Operator'
-'                sort order: '
-'                Map-reduce partition columns:'
-'                      expr: UDFToInteger(_col0)'
-'                      type: int'
-'                tag: -1'
-'                value expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: string'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket3.db/src [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket3.db/src '
-'          Partition'
-'            base file name: src'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/bucket3.db/src'
-'              name bucket3.src'
-'              numFiles 1'
-'              numPartitions 0'
-'              numRows 0'
-'              rawDataSize 0'
-'              serialization.ddl struct src { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket3.db/src'
-'                name bucket3.src'
-'                numFiles 1'
-'                numPartitions 0'
-'                numRows 0'
-'                rawDataSize 0'
-'                serialization.ddl struct src { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 5812'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket3.src'
-'            name: bucket3.src'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: UDFToInteger(_col0)'
-'                  type: int'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 2'
-'              Static Partition Specification: ds=1/'
-'              Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  properties:'
-'                    bucket_count 2'
-'                    bucket_field_name key'
-'                    columns key,value'
-'                    columns.types int:string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/bucket3.db/bucket3_1'
-'                    name bucket3.bucket3_1'
-'                    partition_columns ds'
-'                    serialization.ddl struct bucket3_1 { i32 key, string value}'
-'                    serialization.format 1'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: bucket3.bucket3_1'
-'              TotalFiles: 2'
-'              GatherStats: true'
-'              MultiFileSpray: true'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          partition:'
-'            ds 1'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count 2'
-'                bucket_field_name key'
-'                columns key,value'
-'                columns.types int:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket3.db/bucket3_1'
-'                name bucket3.bucket3_1'
-'                partition_columns ds'
-'                serialization.ddl struct bucket3_1 { i32 key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket3.bucket3_1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-156 rows selected 
->>>  
->>>  insert overwrite table bucket3_1 partition (ds='1') 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  insert overwrite table bucket3_1 partition (ds='2') 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  explain 
-select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket3_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (TOK_TABLE_OR_COL key)))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        s '
-'          TableScan'
-'            alias: s'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (((hash(key) & 2147483647) % 2) = 0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: int'
-'                      expr: value'
-'                      type: string'
-'                      expr: ds'
-'                      type: string'
-'                outputColumnNames: _col0, _col1, _col2'
-'                Reduce Output Operator'
-'                  key expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                  sort order: +'
-'                  tag: -1'
-'                  value expressions:'
-'                        expr: _col0'
-'                        type: int'
-'                        expr: _col1'
-'                        type: string'
-'                        expr: _col2'
-'                        type: string'
-'      Reduce Operator Tree:'
-'        Extract'
-'          File Output Operator'
-'            compressed: false'
-'            GlobalTableId: 0'
-'            table:'
-'                input format: org.apache.hadoop.mapred.TextInputFormat'
-'                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-54 rows selected 
->>>  
->>>  select * from bucket3_1 tablesample (bucket 1 out of 2) s where ds = '1' order by key;
-'key','value','ds'
-'0','val_0','1'
-'0','val_0','1'
-'0','val_0','1'
-'2','val_2','1'
-'4','val_4','1'
-'8','val_8','1'
-'10','val_10','1'
-'12','val_12','1'
-'12','val_12','1'
-'18','val_18','1'
-'18','val_18','1'
-'20','val_20','1'
-'24','val_24','1'
-'24','val_24','1'
-'26','val_26','1'
-'26','val_26','1'
-'28','val_28','1'
-'30','val_30','1'
-'34','val_34','1'
-'42','val_42','1'
-'42','val_42','1'
-'44','val_44','1'
-'54','val_54','1'
-'58','val_58','1'
-'58','val_58','1'
-'64','val_64','1'
-'66','val_66','1'
-'70','val_70','1'
-'70','val_70','1'
-'70','val_70','1'
-'72','val_72','1'
-'72','val_72','1'
-'74','val_74','1'
-'76','val_76','1'
-'76','val_76','1'
-'78','val_78','1'
-'80','val_80','1'
-'82','val_82','1'
-'84','val_84','1'
-'84','val_84','1'
-'86','val_86','1'
-'90','val_90','1'
-'90','val_90','1'
-'90','val_90','1'
-'92','val_92','1'
-'96','val_96','1'
-'98','val_98','1'
-'98','val_98','1'
-'100','val_100','1'
-'100','val_100','1'
-'104','val_104','1'
-'104','val_104','1'
-'114','val_114','1'
-'116','val_116','1'
-'118','val_118','1'
-'118','val_118','1'
-'120','val_120','1'
-'120','val_120','1'
-'126','val_126','1'
-'128','val_128','1'
-'128','val_128','1'
-'128','val_128','1'
-'134','val_134','1'
-'134','val_134','1'
-'136','val_136','1'
-'138','val_138','1'
-'138','val_138','1'
-'138','val_138','1'
-'138','val_138','1'
-'146','val_146','1'
-'146','val_146','1'
-'150','val_150','1'
-'152','val_152','1'
-'152','val_152','1'
-'156','val_156','1'
-'158','val_158','1'
-'160','val_160','1'
-'162','val_162','1'
-'164','val_164','1'
-'164','val_164','1'
-'166','val_166','1'
-'168','val_168','1'
-'170','val_170','1'
-'172','val_172','1'
-'172','val_172','1'
-'174','val_174','1'
-'174','val_174','1'
-'176','val_176','1'
-'176','val_176','1'
-'178','val_178','1'
-'180','val_180','1'
-'186','val_186','1'
-'190','val_190','1'
-'192','val_192','1'
-'194','val_194','1'
-'196','val_196','1'
-'200','val_200','1'
-'200','val_200','1'
-'202','val_202','1'
-'208','val_208','1'
-'208','val_208','1'
-'208','val_208','1'
-'214','val_214','1'
-'216','val_216','1'
-'216','val_216','1'
-'218','val_218','1'
-'222','val_222','1'
-'224','val_224','1'
-'224','val_224','1'
-'226','val_226','1'
-'228','val_228','1'
-'230','val_230','1'
-'230','val_230','1'
-'230','val_230','1'
-'230','val_230','1'
-'230','val_230','1'
-'238','val_238','1'
-'238','val_238','1'
-'242','val_242','1'
-'242','val_242','1'
-'244','val_244','1'
-'248','val_248','1'
-'252','val_252','1'
-'256','val_256','1'
-'256','val_256','1'
-'258','val_258','1'
-'260','val_260','1'
-'262','val_262','1'
-'266','val_266','1'
-'272','val_272','1'
-'272','val_272','1'
-'274','val_274','1'
-'278','val_278','1'
-'278','val_278','1'
-'280','val_280','1'
-'280','val_280','1'
-'282','val_282','1'
-'282','val_282','1'
-'284','val_284','1'
-'286','val_286','1'
-'288','val_288','1'
-'288','val_288','1'
-'292','val_292','1'
-'296','val_296','1'
-'298','val_298','1'
-'298','val_298','1'
-'298','val_298','1'
-'302','val_302','1'
-'306','val_306','1'
-'308','val_308','1'
-'310','val_310','1'
-'316','val_316','1'
-'316','val_316','1'
-'316','val_316','1'
-'318','val_318','1'
-'318','val_318','1'
-'318','val_318','1'
-'322','val_322','1'
-'322','val_322','1'
-'332','val_332','1'
-'336','val_336','1'
-'338','val_338','1'
-'342','val_342','1'
-'342','val_342','1'
-'344','val_344','1'
-'344','val_344','1'
-'348','val_348','1'
-'348','val_348','1'
-'348','val_348','1'
-'348','val_348','1'
-'348','val_348','1'
-'356','val_356','1'
-'360','val_360','1'
-'362','val_362','1'
-'364','val_364','1'
-'366','val_366','1'
-'368','val_368','1'
-'374','val_374','1'
-'378','val_378','1'
-'382','val_382','1'
-'382','val_382','1'
-'384','val_384','1'
-'384','val_384','1'
-'384','val_384','1'
-'386','val_386','1'
-'392','val_392','1'
-'394','val_394','1'
-'396','val_396','1'
-'396','val_396','1'
-'396','val_396','1'
-'400','val_400','1'
-'402','val_402','1'
-'404','val_404','1'
-'404','val_404','1'
-'406','val_406','1'
-'406','val_406','1'
-'406','val_406','1'
-'406','val_406','1'
-'414','val_414','1'
-'414','val_414','1'
-'418','val_418','1'
-'424','val_424','1'
-'424','val_424','1'
-'430','val_430','1'
-'430','val_430','1'
-'430','val_430','1'
-'432','val_432','1'
-'436','val_436','1'
-'438','val_438','1'
-'438','val_438','1'
-'438','val_438','1'
-'444','val_444','1'
-'446','val_446','1'
-'448','val_448','1'
-'452','val_452','1'
-'454','val_454','1'
-'454','val_454','1'
-'454','val_454','1'
-'458','val_458','1'
-'458','val_458','1'
-'460','val_460','1'
-'462','val_462','1'
-'462','val_462','1'
-'466','val_466','1'
-'466','val_466','1'
-'466','val_466','1'
-'468','val_468','1'
-'468','val_468','1'
-'468','val_468','1'
-'468','val_468','1'
-'470','val_470','1'
-'472','val_472','1'
-'478','val_478','1'
-'478','val_478','1'
-'480','val_480','1'
-'480','val_480','1'
-'480','val_480','1'
-'482','val_482','1'
-'484','val_484','1'
-'490','val_490','1'
-'492','val_492','1'
-'492','val_492','1'
-'494','val_494','1'
-'496','val_496','1'
-'498','val_498','1'
-'498','val_498','1'
-'498','val_498','1'
-247 rows selected 
->>>  !record
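
bucket3.q, deleted above, repeated the same pattern on a partitioned table, where each partition carries its own pair of bucket files. A minimal sketch drawn from the file's statements:

set hive.enforce.bucketing = true;
set hive.exec.reducers.max = 1;

CREATE TABLE bucket3_1(key int, value string) PARTITIONED BY (ds string) CLUSTERED BY (key) INTO 2 BUCKETS;
INSERT OVERWRITE TABLE bucket3_1 PARTITION (ds='1') SELECT * FROM src;
INSERT OVERWRITE TABLE bucket3_1 PARTITION (ds='2') SELECT * FROM src;

-- Partition pruning (ds = '1') narrows the scan to one partition before the bucket
-- filter picks bucket 1 of 2 within it, so the result matches bucket2's 247 rows.
SELECT * FROM bucket3_1 TABLESAMPLE (BUCKET 1 OUT OF 2) s WHERE ds = '1' ORDER BY key;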

http://git-wip-us.apache.org/repos/asf/hive/blob/3890ed65/ql/src/test/results/beelinepositive/bucket4.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/beelinepositive/bucket4.q.out b/ql/src/test/results/beelinepositive/bucket4.q.out
deleted file mode 100644
index 83e664e..0000000
--- a/ql/src/test/results/beelinepositive/bucket4.q.out
+++ /dev/null
@@ -1,474 +0,0 @@
-Saving all output to "!!{outputDirectory}!!/bucket4.q.raw". Enter "record" with no arguments to stop it.
->>>  !run !!{qFileDirectory}!!/bucket4.q
->>>  set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-No rows affected 
->>>  set hive.enforce.bucketing = true;
-No rows affected 
->>>  set hive.enforce.sorting = true;
-No rows affected 
->>>  set hive.exec.reducers.max = 1;
-No rows affected 
->>>  
->>>  CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
-No rows affected 
->>>  
->>>  explain extended 
-insert overwrite table bucket4_1 
-select * from src;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME bucket4_1))) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 depends on stages: Stage-1'
-'  Stage-2 depends on stages: Stage-0'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        src '
-'          TableScan'
-'            alias: src'
-'            GatherStats: false'
-'            Select Operator'
-'              expressions:'
-'                    expr: key'
-'                    type: string'
-'                    expr: value'
-'                    type: string'
-'              outputColumnNames: _col0, _col1'
-'              Reduce Output Operator'
-'                key expressions:'
-'                      expr: UDFToInteger(_col0)'
-'                      type: int'
-'                sort order: +'
-'                Map-reduce partition columns:'
-'                      expr: UDFToInteger(_col0)'
-'                      type: int'
-'                tag: -1'
-'                value expressions:'
-'                      expr: _col0'
-'                      type: string'
-'                      expr: _col1'
-'                      type: string'
-'      Needs Tagging: false'
-'      Path -> Alias:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket4.db/src [src]'
-'      Path -> Partition:'
-'        !!{hive.metastore.warehouse.dir}!!/bucket4.db/src '
-'          Partition'
-'            base file name: src'
-'            input format: org.apache.hadoop.mapred.TextInputFormat'
-'            output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'            properties:'
-'              bucket_count -1'
-'              columns key,value'
-'              columns.types string:string'
-'              file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'              file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              location !!{hive.metastore.warehouse.dir}!!/bucket4.db/src'
-'              name bucket4.src'
-'              numFiles 1'
-'              numPartitions 0'
-'              numRows 0'
-'              rawDataSize 0'
-'              serialization.ddl struct src { string key, string value}'
-'              serialization.format 1'
-'              serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              totalSize 5812'
-'              transient_lastDdlTime !!UNIXTIME!!'
-'            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'          '
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                bucket_count -1'
-'                columns key,value'
-'                columns.types string:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket4.db/src'
-'                name bucket4.src'
-'                numFiles 1'
-'                numPartitions 0'
-'                numRows 0'
-'                rawDataSize 0'
-'                serialization.ddl struct src { string key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                totalSize 5812'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket4.src'
-'            name: bucket4.src'
-'      Reduce Operator Tree:'
-'        Extract'
-'          Select Operator'
-'            expressions:'
-'                  expr: UDFToInteger(_col0)'
-'                  type: int'
-'                  expr: _col1'
-'                  type: string'
-'            outputColumnNames: _col0, _col1'
-'            File Output Operator'
-'              compressed: false'
-'              GlobalTableId: 1'
-'              directory: pfile:!!{hive.exec.scratchdir}!!'
-'              NumFilesPerFileSink: 2'
-'              Stats Publishing Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-'              table:'
-'                  input format: org.apache.hadoop.mapred.TextInputFormat'
-'                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                  properties:'
-'                    SORTBUCKETCOLSPREFIX TRUE'
-'                    bucket_count 2'
-'                    bucket_field_name key'
-'                    columns key,value'
-'                    columns.types int:string'
-'                    file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                    file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                    location !!{hive.metastore.warehouse.dir}!!/bucket4.db/bucket4_1'
-'                    name bucket4.bucket4_1'
-'                    serialization.ddl struct bucket4_1 { i32 key, string value}'
-'                    serialization.format 1'
-'                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                    transient_lastDdlTime !!UNIXTIME!!'
-'                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                  name: bucket4.bucket4_1'
-'              TotalFiles: 2'
-'              GatherStats: true'
-'              MultiFileSpray: true'
-''
-'  Stage: Stage-0'
-'    Move Operator'
-'      tables:'
-'          replace: true'
-'          source: pfile:!!{hive.exec.scratchdir}!!'
-'          table:'
-'              input format: org.apache.hadoop.mapred.TextInputFormat'
-'              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'              properties:'
-'                SORTBUCKETCOLSPREFIX TRUE'
-'                bucket_count 2'
-'                bucket_field_name key'
-'                columns key,value'
-'                columns.types int:string'
-'                file.inputformat org.apache.hadoop.mapred.TextInputFormat'
-'                file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-'                location !!{hive.metastore.warehouse.dir}!!/bucket4.db/bucket4_1'
-'                name bucket4.bucket4_1'
-'                serialization.ddl struct bucket4_1 { i32 key, string value}'
-'                serialization.format 1'
-'                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'                transient_lastDdlTime !!UNIXTIME!!'
-'              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
-'              name: bucket4.bucket4_1'
-'          tmp directory: pfile:!!{hive.exec.scratchdir}!!'
-''
-'  Stage: Stage-2'
-'    Stats-Aggr Operator'
-'      Stats Aggregation Key Prefix: pfile:!!{hive.exec.scratchdir}!!'
-''
-''
-156 rows selected 
->>>  
->>>  insert overwrite table bucket4_1 
-select * from src;
-'_col0','_col1'
-No rows selected 
->>>  
->>>  explain 
-select * from bucket4_1 tablesample (bucket 1 out of 2) s;
-'Explain'
-'ABSTRACT SYNTAX TREE:'
-'  (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME bucket4_1) (TOK_TABLEBUCKETSAMPLE 1 2) s)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF))))'
-''
-'STAGE DEPENDENCIES:'
-'  Stage-1 is a root stage'
-'  Stage-0 is a root stage'
-''
-'STAGE PLANS:'
-'  Stage: Stage-1'
-'    Map Reduce'
-'      Alias -> Map Operator Tree:'
-'        s '
-'          TableScan'
-'            alias: s'
-'            Filter Operator'
-'              predicate:'
-'                  expr: (((hash(key) & 2147483647) % 2) = 0)'
-'                  type: boolean'
-'              Select Operator'
-'                expressions:'
-'                      expr: key'
-'                      type: int'
-'                      expr: value'
-'                      type: string'
-'                outputColumnNames: _col0, _col1'
-'                File Output Operator'
-'                  compressed: false'
-'                  GlobalTableId: 0'
-'                  table:'
-'                      input format: org.apache.hadoop.mapred.TextInputFormat'
-'                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
-''
-'  Stage: Stage-0'
-'    Fetch Operator'
-'      limit: -1'
-''
-''
-37 rows selected 
->>>  
->>>  select * from bucket4_1 tablesample (bucket 1 out of 2) s;
-'key','value'
-'0','val_0'
-'0','val_0'
-'0','val_0'
-'2','val_2'
-'4','val_4'
-'8','val_8'
-'10','val_10'
-'12','val_12'
-'12','val_12'
-'18','val_18'
-'18','val_18'
-'20','val_20'
-'24','val_24'
-'24','val_24'
-'26','val_26'
-'26','val_26'
-'28','val_28'
-'30','val_30'
-'34','val_34'
-'42','val_42'
-'42','val_42'
-'44','val_44'
-'54','val_54'
-'58','val_58'
-'58','val_58'
-'64','val_64'
-'66','val_66'
-'70','val_70'
-'70','val_70'
-'70','val_70'
-'72','val_72'
-'72','val_72'
-'74','val_74'
-'76','val_76'
-'76','val_76'
-'78','val_78'
-'80','val_80'
-'82','val_82'
-'84','val_84'
-'84','val_84'
-'86','val_86'
-'90','val_90'
-'90','val_90'
-'90','val_90'
-'92','val_92'
-'96','val_96'
-'98','val_98'
-'98','val_98'
-'100','val_100'
-'100','val_100'
-'104','val_104'
-'104','val_104'
-'114','val_114'
-'116','val_116'
-'118','val_118'
-'118','val_118'
-'120','val_120'
-'120','val_120'
-'126','val_126'
-'128','val_128'
-'128','val_128'
-'128','val_128'
-'134','val_134'
-'134','val_134'
-'136','val_136'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'138','val_138'
-'146','val_146'
-'146','val_146'
-'150','val_150'
-'152','val_152'
-'152','val_152'
-'156','val_156'
-'158','val_158'
-'160','val_160'
-'162','val_162'
-'164','val_164'
-'164','val_164'
-'166','val_166'
-'168','val_168'
-'170','val_170'
-'172','val_172'
-'172','val_172'
-'174','val_174'
-'174','val_174'
-'176','val_176'
-'176','val_176'
-'178','val_178'
-'180','val_180'
-'186','val_186'
-'190','val_190'
-'192','val_192'
-'194','val_194'
-'196','val_196'
-'200','val_200'
-'200','val_200'
-'202','val_202'
-'208','val_208'
-'208','val_208'
-'208','val_208'
-'214','val_214'
-'216','val_216'
-'216','val_216'
-'218','val_218'
-'222','val_222'
-'224','val_224'
-'224','val_224'
-'226','val_226'
-'228','val_228'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'230','val_230'
-'238','val_238'
-'238','val_238'
-'242','val_242'
-'242','val_242'
-'244','val_244'
-'248','val_248'
-'252','val_252'
-'256','val_256'
-'256','val_256'
-'258','val_258'
-'260','val_260'
-'262','val_262'
-'266','val_266'
-'272','val_272'
-'272','val_272'
-'274','val_274'
-'278','val_278'
-'278','val_278'
-'280','val_280'
-'280','val_280'
-'282','val_282'
-'282','val_282'
-'284','val_284'
-'286','val_286'
-'288','val_288'
-'288','val_288'
-'292','val_292'
-'296','val_296'
-'298','val_298'
-'298','val_298'
-'298','val_298'
-'302','val_302'
-'306','val_306'
-'308','val_308'
-'310','val_310'
-'316','val_316'
-'316','val_316'
-'316','val_316'
-'318','val_318'
-'318','val_318'
-'318','val_318'
-'322','val_322'
-'322','val_322'
-'332','val_332'
-'336','val_336'
-'338','val_338'
-'342','val_342'
-'342','val_342'
-'344','val_344'
-'344','val_344'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'348','val_348'
-'356','val_356'
-'360','val_360'
-'362','val_362'
-'364','val_364'
-'366','val_366'
-'368','val_368'
-'374','val_374'
-'378','val_378'
-'382','val_382'
-'382','val_382'
-'384','val_384'
-'384','val_384'
-'384','val_384'
-'386','val_386'
-'392','val_392'
-'394','val_394'
-'396','val_396'
-'396','val_396'
-'396','val_396'
-'400','val_400'
-'402','val_402'
-'404','val_404'
-'404','val_404'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'406','val_406'
-'414','val_414'
-'414','val_414'
-'418','val_418'
-'424','val_424'
-'424','val_424'
-'430','val_430'
-'430','val_430'
-'430','val_430'
-'432','val_432'
-'436','val_436'
-'438','val_438'
-'438','val_438'
-'438','val_438'
-'444','val_444'
-'446','val_446'
-'448','val_448'
-'452','val_452'
-'454','val_454'
-'454','val_454'
-'454','val_454'
-'458','val_458'
-'458','val_458'
-'460','val_460'
-'462','val_462'
-'462','val_462'
-'466','val_466'
-'466','val_466'
-'466','val_466'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'468','val_468'
-'470','val_470'
-'472','val_472'
-'478','val_478'
-'478','val_478'
-'480','val_480'
-'480','val_480'
-'480','val_480'
-'482','val_482'
-'484','val_484'
-'490','val_490'
-'492','val_492'
-'492','val_492'
-'494','val_494'
-'496','val_496'
-'498','val_498'
-'498','val_498'
-'498','val_498'
-247 rows selected 
->>>  !record
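
bucket4.q, deleted above, added sort enforcement on top of bucketing: with hive.enforce.sorting = true and SORTED BY (key), each bucket file is written in key order (visible as the extra 'key expressions' / 'sort order: +' in the plan's Reduce Output Operator). A minimal sketch from the file:

set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
set hive.enforce.bucketing = true;
set hive.enforce.sorting = true;
set hive.exec.reducers.max = 1;

CREATE TABLE bucket4_1(key int, value string) CLUSTERED BY (key) SORTED BY (key) INTO 2 BUCKETS;
INSERT OVERWRITE TABLE bucket4_1 SELECT * FROM src;

-- No ORDER BY here: rows within the sampled bucket file are already sorted by key.
SELECT * FROM bucket4_1 TABLESAMPLE (BUCKET 1 OUT OF 2) s;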

