hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From the...@apache.org
Subject svn commit: r1613310 [1/2] - in /hive/trunk: contrib/src/test/results/clientnegative/ contrib/src/test/results/clientpositive/ itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apa...
Date Thu, 24 Jul 2014 22:08:24 GMT
Author: thejas
Date: Thu Jul 24 22:08:23 2014
New Revision: 1613310

URL: http://svn.apache.org/r1613310
Log:
HIVE-7451 : pass function name in create/drop function to authorization api (Thejas Nair, reviewed by Jason Dere)

Modified:
    hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
    hive/trunk/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out
    hive/trunk/contrib/src/test/results/clientnegative/udtf_explode2.q.out
    hive/trunk/contrib/src/test/results/clientpositive/dboutput.q.out
    hive/trunk/contrib/src/test/results/clientpositive/lateral_view_explode2.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udf_row_sequence.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udtf_explode2.q.out
    hive/trunk/contrib/src/test/results/clientpositive/udtf_output_on_close.q.out
    hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
    hive/trunk/ql/src/test/results/clientnegative/authorization_addjar.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner_default.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_compile.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_create_func2.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_create_macro1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_deletejar.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_dfs.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_fail_8.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail_nogrant.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_insert_noinspriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_insert_noselectpriv.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_insertoverwrite_nodel.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_rename.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_alter_tab_serdeprop.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_drop_tab.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_not_owner_drop_view.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_priv_current_role_neg.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_rolehierarchy_privs.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_select.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_select_view.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_show_parts_nosel.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorization_truncate.q.out
    hive/trunk/ql/src/test/results/clientnegative/authorize_create_tbl.q.out
    hive/trunk/ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out
    hive/trunk/ql/src/test/results/clientnegative/create_function_nonudf_class.q.out
    hive/trunk/ql/src/test/results/clientnegative/create_udaf_failure.q.out
    hive/trunk/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out
    hive/trunk/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out
    hive/trunk/ql/src/test/results/clientnegative/drop_native_udf.q.out
    hive/trunk/ql/src/test/results/clientnegative/temp_table_authorize_create_tbl.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_function_does_not_implement_udf.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_local_resource.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error.q.out
    hive/trunk/ql/src/test/results/clientnegative/udf_test_error_reduce.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_admin_almighty2.q.out
    hive/trunk/ql/src/test/results/clientpositive/authorization_create_func1.q.out
    hive/trunk/ql/src/test/results/clientpositive/autogen_colalias.q.out
    hive/trunk/ql/src/test/results/clientpositive/compile_processor.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_func1.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_genericudaf.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_genericudf.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_udaf.q.out
    hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
    hive/trunk/ql/src/test/results/clientpositive/drop_udf.q.out
    hive/trunk/ql/src/test/results/clientpositive/ptf_register_tblfn.q.out
    hive/trunk/ql/src/test/results/clientpositive/ptf_streaming.q.out
    hive/trunk/ql/src/test/results/clientpositive/udaf_sum_list.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_compare_java_string.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_context_aware.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_logic_java_boolean.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_testlength.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_testlength2.q.out
    hive/trunk/ql/src/test/results/clientpositive/udf_using.q.out
    hive/trunk/ql/src/test/results/clientpositive/windowing_udaf2.q.out

Modified: hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientnegative/case_with_row_sequence.q.out Thu Jul 24 22:08:23 2014
@@ -5,11 +5,11 @@ POSTHOOK: type: DROPFUNCTION
 PREHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: row_sequence
 POSTHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: row_sequence
 PREHOOK: query: -- make sure a stateful function inside of CASE throws an exception
 -- since the short-circuiting requirements are contradictory
 SELECT CASE WHEN 3 > 2 THEN 10 WHEN row_sequence() > 5 THEN 20 ELSE 30 END

Modified: hive/trunk/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientnegative/invalid_row_sequence.q.out Thu Jul 24 22:08:23 2014
@@ -2,18 +2,18 @@ PREHOOK: query: -- Verify that a statefu
 
 drop temporary function row_sequence
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: row_sequence
 POSTHOOK: query: -- Verify that a stateful UDF cannot be used outside of the SELECT list
 
 drop temporary function row_sequence
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: row_sequence
 PREHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: row_sequence
 POSTHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: row_sequence
 FAILED: SemanticException [Error 10084]: Stateful UDF's can only be invoked in the SELECT list

Modified: hive/trunk/contrib/src/test/results/clientnegative/udtf_explode2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientnegative/udtf_explode2.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientnegative/udtf_explode2.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientnegative/udtf_explode2.q.out Thu Jul 24 22:08:23 2014
@@ -1,7 +1,7 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: explode2
 POSTHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: explode2
 FAILED: SemanticException [Error 10083]: The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF expected 2 aliases but got 1

Modified: hive/trunk/contrib/src/test/results/clientpositive/dboutput.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/dboutput.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/dboutput.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/dboutput.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: dboutput
 POSTHOOK: query: CREATE TEMPORARY FUNCTION dboutput AS 'org.apache.hadoop.hive.contrib.genericudf.example.GenericUDFDBOutput'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: dboutput
 PREHOOK: query: DESCRIBE FUNCTION dboutput
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION dboutput
@@ -186,7 +186,7 @@ POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 PREHOOK: query: DROP TEMPORARY FUNCTION dboutput
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: dboutput
 POSTHOOK: query: DROP TEMPORARY FUNCTION dboutput
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: dboutput

Modified: hive/trunk/contrib/src/test/results/clientpositive/lateral_view_explode2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/lateral_view_explode2.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/lateral_view_explode2.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/lateral_view_explode2.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: explode2
 POSTHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: explode2
 PREHOOK: query: SELECT col1, col2 FROM src LATERAL VIEW explode2(array(1,2,3)) myTable AS col1, col2 LIMIT 3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -17,7 +17,7 @@ POSTHOOK: Input: default@src
 3	3
 PREHOOK: query: DROP TEMPORARY FUNCTION explode2
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: explode2
 POSTHOOK: query: DROP TEMPORARY FUNCTION explode2
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: explode2

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_avg.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_avg
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_avg AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleAvg'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_avg
 PREHOOK: query: EXPLAIN
 SELECT example_avg(substr(value,5)),
        example_avg(IF(substr(value,5) > 250, NULL, substr(value,5)))
@@ -77,7 +77,7 @@ POSTHOOK: Input: default@src
 260.182	134.82926829268294
 PREHOOK: query: DROP TEMPORARY FUNCTION example_avg
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_avg
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_avg
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_avg

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_group_concat.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_group_concat
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_group_concat AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleGroupConcat'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_group_concat
 PREHOOK: query: EXPLAIN
 SELECT substr(value,5,1), example_group_concat("(", key, ":", value, ")")
 FROM src
@@ -90,7 +90,7 @@ POSTHOOK: Input: default@src
 9	(90:val_90)(90:val_90)(90:val_90)(92:val_92)(95:val_95)(95:val_95)(96:val_96)(97:val_97)(97:val_97)(98:val_98)(98:val_98)(9:val_9)
 PREHOOK: query: DROP TEMPORARY FUNCTION example_group_concat
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_group_concat
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_group_concat
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_group_concat

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_max AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_max
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_max AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMax'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_max
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED example_max
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED example_max
@@ -82,7 +82,7 @@ POSTHOOK: Input: default@src
 98	98
 PREHOOK: query: DROP TEMPORARY FUNCTION example_max
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_max
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_max
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_max

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_max_n.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_max_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_max_n
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_max_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMaxN'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_max_n
 PREHOOK: query: EXPLAIN
 SELECT example_max_n(substr(value,5),10),
        example_max_n(IF(substr(value,5) > 250, NULL, substr(value,5)),10)
@@ -77,7 +77,7 @@ POSTHOOK: Input: default@src
 [498.0,498.0,498.0,497.0,496.0,495.0,494.0,493.0,492.0,492.0]	[249.0,248.0,247.0,244.0,242.0,242.0,241.0,239.0,239.0,238.0]
 PREHOOK: query: DROP TEMPORARY FUNCTION example_max_n
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_max_n
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_max_n
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_max_n

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_min AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_min
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_min AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMin'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_min
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED example_min
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED example_min
@@ -82,7 +82,7 @@ POSTHOOK: Input: default@src
 0	0
 PREHOOK: query: DROP TEMPORARY FUNCTION example_min
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_min
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_min
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_min

Modified: hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udaf_example_min_n.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_min_n
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_min_n AS 'org.apache.hadoop.hive.contrib.udaf.example.UDAFExampleMinN'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_min_n
 PREHOOK: query: EXPLAIN
 SELECT example_min_n(substr(value,5),10),
        example_min_n(IF(substr(value,5) < 250, NULL, substr(value,5)),10)
@@ -77,7 +77,7 @@ POSTHOOK: Input: default@src
 [0.0,0.0,0.0,2.0,4.0,5.0,5.0,5.0,8.0,9.0]	[252.0,255.0,255.0,256.0,256.0,257.0,258.0,260.0,262.0,263.0]
 PREHOOK: query: DROP TEMPORARY FUNCTION example_min_n
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_min_n
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_min_n
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_min_n

Modified: hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udf_example_add.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_add
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_add AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleAdd'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_add
 PREHOOK: query: EXPLAIN
 SELECT example_add(1, 2),
        example_add(1, 2, 3),
@@ -81,7 +81,7 @@ POSTHOOK: Input: default@src
 3	6	10	3.3000000000000003	6.6	11.0	10.4
 PREHOOK: query: DROP TEMPORARY FUNCTION example_add
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_add
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_add
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_add

Modified: hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udf_example_arraymapstruct.q.out Thu Jul 24 22:08:23 2014
@@ -1,21 +1,21 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_arraysum    AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_arraysum
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_arraysum    AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleArraySum'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_arraysum
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_mapconcat   AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_mapconcat
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_mapconcat   AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleMapConcat'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_mapconcat
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_structprint AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleStructPrint'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_structprint
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_structprint AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleStructPrint'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_structprint
 PREHOOK: query: EXPLAIN
 SELECT example_arraysum(lint), example_mapconcat(mstringstring), example_structprint(lintstring[0])
 FROM src_thrift
@@ -76,19 +76,19 @@ POSTHOOK: Input: default@src_thrift
 NULL	NULL	NULL
 PREHOOK: query: DROP TEMPORARY FUNCTION example_arraysum
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_arraysum
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_arraysum
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_arraysum
 PREHOOK: query: DROP TEMPORARY FUNCTION example_mapconcat
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_mapconcat
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_mapconcat
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_mapconcat
 PREHOOK: query: DROP TEMPORARY FUNCTION example_structprint
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_structprint
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_structprint
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_structprint

Modified: hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udf_example_format.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_format
 POSTHOOK: query: CREATE TEMPORARY FUNCTION example_format AS 'org.apache.hadoop.hive.contrib.udf.example.UDFExampleFormat'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_format
 PREHOOK: query: EXPLAIN
 SELECT example_format("abc"),
        example_format("%1$s", 1.1),
@@ -69,7 +69,7 @@ POSTHOOK: Input: default@src
 abc	1.1	1.1 1.200000e+00	a 12 10
 PREHOOK: query: DROP TEMPORARY FUNCTION example_format
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: example_format
 POSTHOOK: query: DROP TEMPORARY FUNCTION example_format
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: example_format

Modified: hive/trunk/contrib/src/test/results/clientpositive/udf_row_sequence.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udf_row_sequence.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udf_row_sequence.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udf_row_sequence.q.out Thu Jul 24 22:08:23 2014
@@ -13,11 +13,11 @@ POSTHOOK: type: DROPFUNCTION
 PREHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: row_sequence
 POSTHOOK: query: create temporary function row_sequence as 
 'org.apache.hadoop.hive.contrib.udf.UDFRowSequence'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: row_sequence
 PREHOOK: query: DESCRIBE FUNCTION EXTENDED row_sequence
 PREHOOK: type: DESCFUNCTION
 POSTHOOK: query: DESCRIBE FUNCTION EXTENDED row_sequence
@@ -643,7 +643,7 @@ POSTHOOK: Input: default@src
 119	false
 PREHOOK: query: drop temporary function row_sequence
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: row_sequence
 POSTHOOK: query: drop temporary function row_sequence
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: row_sequence

Modified: hive/trunk/contrib/src/test/results/clientpositive/udtf_explode2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udtf_explode2.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udtf_explode2.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udtf_explode2.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: explode2
 POSTHOOK: query: CREATE TEMPORARY FUNCTION explode2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFExplode2'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: explode2
 PREHOOK: query: SELECT explode2(array(1,2,3)) AS (col1, col2) FROM src LIMIT 3
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
@@ -17,7 +17,7 @@ POSTHOOK: Input: default@src
 3	3
 PREHOOK: query: DROP TEMPORARY FUNCTION explode2
 PREHOOK: type: DROPFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: explode2
 POSTHOOK: query: DROP TEMPORARY FUNCTION explode2
 POSTHOOK: type: DROPFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: explode2

Modified: hive/trunk/contrib/src/test/results/clientpositive/udtf_output_on_close.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/test/results/clientpositive/udtf_output_on_close.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/contrib/src/test/results/clientpositive/udtf_output_on_close.q.out (original)
+++ hive/trunk/contrib/src/test/results/clientpositive/udtf_output_on_close.q.out Thu Jul 24 22:08:23 2014
@@ -1,9 +1,9 @@
 PREHOOK: query: CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2'
 PREHOOK: type: CREATEFUNCTION
-PREHOOK: Output: database:default
+PREHOOK: Output: udtfcount2
 POSTHOOK: query: CREATE TEMPORARY FUNCTION udtfCount2 AS 'org.apache.hadoop.hive.contrib.udtf.example.GenericUDTFCount2'
 POSTHOOK: type: CREATEFUNCTION
-POSTHOOK: Output: database:default
+POSTHOOK: Output: udtfcount2
 PREHOOK: query: SELECT udtfCount2(key) AS count FROM src
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src

Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java (original)
+++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/authorization/TestJdbcWithSQLAuthorization.java Thu Jul 24 22:08:23 2014
@@ -125,15 +125,14 @@ public class TestJdbcWithSQLAuthorizatio
       // create tables
       try {
         stmt.execute("dfs -ls /tmp/");
-      } catch (SQLException e){
+      } catch (SQLException e) {
         caughtException = true;
-        String msg = "Principal [name=user1, type=USER] does not have following "
-            + "privileges on Object [type=COMMAND_PARAMS, name=[-ls, /tmp/]] for operation "
-            + "DFS : [ADMIN PRIVILEGE]";
+        String msg = "Permission denied: Principal [name=user1, type=USER] does not have "
+            + "following privileges for operation DFS [[ADMIN PRIVILEGE] on "
+            + "Object [type=COMMAND_PARAMS, name=[-ls, /tmp/]]]";
         assertTrue("Checking content of error message:" + e.getMessage(),
             e.getMessage().contains(msg));
-      }
-      finally {
+      } finally {
         stmt.close();
         hs2Conn.close();
       }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Thu Jul 24 22:08:23 2014
@@ -724,18 +724,21 @@ public class Driver implements CommandPr
 
       //support for authorization on partitions needs to be added
       String dbname = null;
-      String tableURI = null;
+      String objName = null;
       switch(privObject.getType()){
       case DATABASE:
         dbname = privObject.getDatabase() == null ? null : privObject.getDatabase().getName();
         break;
       case TABLE:
         dbname = privObject.getTable() == null ? null : privObject.getTable().getDbName();
-        tableURI = privObject.getTable() == null ? null : privObject.getTable().getTableName();
+        objName = privObject.getTable() == null ? null : privObject.getTable().getTableName();
         break;
       case DFS_DIR:
       case LOCAL_DIR:
-        tableURI = privObject.getD();
+        objName = privObject.getD();
+        break;
+      case FUNCTION:
+        objName = privObject.getFunctionName();
         break;
       case DUMMYPARTITION:
       case PARTITION:
@@ -745,7 +748,7 @@ public class Driver implements CommandPr
           throw new AssertionError("Unexpected object type");
       }
       HivePrivObjectActionType actionType = AuthorizationUtils.getActionType(privObject);
-      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, tableURI,
+      HivePrivilegeObject hPrivObject = new HivePrivilegeObject(privObjType, dbname, objName,
           actionType);
       hivePrivobjs.add(hPrivObject);
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Thu Jul 24 22:08:23 2014
@@ -3170,7 +3170,7 @@ public class DDLTask extends Task<DDLWor
       HivePrincipal grantor = privilege.getGrantorPrincipal();
 
       appendNonNull(builder, resource.getDbname(), true);
-      appendNonNull(builder, resource.getTableViewURI());
+      appendNonNull(builder, resource.getObjectName());
       appendNonNull(builder, resource.getPartKeys());
       appendNonNull(builder, resource.getColumns());
       appendNonNull(builder, principal.getName());

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/Entity.java Thu Jul 24 22:08:23 2014
@@ -40,7 +40,7 @@ public class Entity implements Serializa
    * The type of the entity.
    */
   public static enum Type {
-    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR
+    DATABASE, TABLE, PARTITION, DUMMYPARTITION, DFS_DIR, LOCAL_DIR, FUNCTION
   }
 
   /**
@@ -64,11 +64,17 @@ public class Entity implements Serializa
   private Partition p;
 
   /**
-   * The directory if this is a directory.
+   * The directory if this is a directory.
    */
   private String d;
 
   /**
+   * An object that is represented as a String.
+   * Currently used for functions.
+   */
+  private String stringObject;
+
+  /**
    * This is derived from t and p, but we need to serialize this field to make
    * sure Entity.hashCode() does not need to recursively read into t and p.
    */
@@ -136,6 +142,21 @@ public class Entity implements Serializa
     this.d = d;
   }
 
+  public String getFunctionName() {
+    if (typ == Type.FUNCTION) {
+      return stringObject;
+    }
+    return null;
+  }
+
+  public void setFunctionName(String funcName) {
+    if (typ != Type.FUNCTION) {
+      throw new IllegalArgumentException(
+          "Set function can't be called on entity if the entity type is not " + Type.FUNCTION);
+    }
+    this.stringObject = funcName;
+  }
+
   /**
    * Only used by serialization.
    */
@@ -210,6 +231,24 @@ public class Entity implements Serializa
   }
 
   /**
+   * Create an entity representing an object with the given name, database namespace, and type
+   * @param database - database namespace
+   * @param strObj - object name as string
+   * @param type - the entity type. this constructor only supports FUNCTION type currently
+   */
+  public Entity(Database database, String strObj, Type type) {
+    if (type != Type.FUNCTION) {
+      throw new IllegalArgumentException("This constructor is supported only for type:"
+          + Type.FUNCTION);
+    }
+    this.database = database;
+    this.stringObject = strObj;
+    this.typ = type;
+    this.complete = true;
+    name = computeName();
+  }
+
+  /**
    * Get the parameter map of the Entity.
    */
   public Map<String, String> getParameters() {
@@ -293,6 +332,8 @@ public class Entity implements Serializa
       return t.getDbName() + "@" + t.getTableName() + "@" + p.getName();
     case DUMMYPARTITION:
       return p.getName();
+    case FUNCTION:
+      return stringObject;
     default:
       return d;
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/hooks/WriteEntity.java Thu Jul 24 22:08:23 2014
@@ -82,6 +82,19 @@ public class WriteEntity extends Entity 
   }
 
   /**
+   * Constructor for objects represented as String.
+   * Currently applicable only for function names.
+   * @param db
+   * @param objName
+   * @param type
+   * @param writeType
+   */
+  public WriteEntity(Database db, String objName, Type type, WriteType writeType) {
+    super(db, objName, type);
+    this.writeType = writeType;
+  }
+
+  /**
    * Constructor for a partition.
    *
    * @param p

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java Thu Jul 24 22:08:23 2014
@@ -173,7 +173,7 @@ public class SessionHiveMetaStoreClient 
     return tables;
   }
 
-  
+
   @Override
   public boolean tableExists(String databaseName, String tableName) throws MetaException,
   TException, UnknownDBException {
@@ -331,7 +331,7 @@ public class SessionHiveMetaStoreClient 
               " is not writable by " + conf.getUser());
         }
       } catch (IOException err) {
-        MetaException metaException = 
+        MetaException metaException =
             new MetaException("Error checking temp table path for " + table.getTableName());
         metaException.initCause(err);
         throw metaException;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java Thu Jul 24 22:08:23 2014
@@ -24,7 +24,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
@@ -32,14 +31,13 @@ import org.apache.hadoop.hive.ql.ErrorMs
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
-import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
 
 /**
  * FunctionSemanticAnalyzer.
@@ -78,7 +76,7 @@ public class FunctionSemanticAnalyzer ex
 
     // find any referenced resources
     List<ResourceUri> resources = getResourceList(ast);
-    
+
     CreateFunctionDesc desc =
         new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources);
     rootTasks.add(TaskFactory.get(new FunctionWork(desc), conf));
@@ -152,15 +150,22 @@ public class FunctionSemanticAnalyzer ex
   }
 
   /**
-   * Add write entities to the semantic analyzer to restrict function creation to priviliged users.
+   * Add write entities to the semantic analyzer to restrict function creation to privileged users.
    */
   private void addEntities(String functionName, boolean isTemporaryFunction)
       throws SemanticException {
+    // If the function is being added under a database 'namespace', then add an entity representing
+    // the database (only applicable to permanent/metastore functions).
+    // We also add a second entity representing the function name.
+    // The authorization api implementation can decide which entities it wants to use to
+    // authorize the create/drop function call.
+
+    // Add the relevant database 'namespace' as a WriteEntity
     Database database = null;
-    if (isTemporaryFunction) {
-      // This means temp function creation is also restricted.
-      database = getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
-    } else {
+
+    // temporary functions don't have any database 'namespace' associated with them;
+    // the namespace matters only for permanent functions
+    if (!isTemporaryFunction) {
       try {
         String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(functionName);
         String dbName = qualifiedNameParts[0];
@@ -173,5 +178,9 @@ public class FunctionSemanticAnalyzer ex
     if (database != null) {
       outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
     }
+
+    // Add the function name as a WriteEntity
+    outputs.add(new WriteEntity(database, functionName, Type.FUNCTION,
+        WriteEntity.WriteType.DDL_NO_LOCK));
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/AuthorizationUtils.java Thu Jul 24 22:08:23 2014
@@ -99,6 +99,8 @@ public class AuthorizationUtils {
     case PARTITION:
     case DUMMYPARTITION: //need to determine if a different type is needed for dummy partitions
       return HivePrivilegeObjectType.PARTITION;
+    case FUNCTION:
+      return HivePrivilegeObjectType.FUNCTION;
     default:
       return null;
     }
@@ -253,12 +255,8 @@ public class AuthorizationUtils {
       return HiveObjectType.PARTITION;
     case COLUMN:
       return HiveObjectType.COLUMN;
-    case LOCAL_URI:
-    case DFS_URI:
-      throw new HiveException("Unsupported type " + type);
     default:
-      //should not happen as we have accounted for all types
-      throw new AssertionError("Unsupported type " + type);
+      throw new HiveException("Unsupported type " + type);
     }
   }
 
@@ -301,7 +299,7 @@ public class AuthorizationUtils {
       return null;
     }
     HiveObjectType objType = getThriftHiveObjType(privObj.getType());
-    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getTableViewURI(), null, null);
+    return new HiveObjectRef(objType, privObj.getDbname(), privObj.getObjectName(), null, null);
   }
 
   public static HivePrivObjectActionType getActionType(Entity privObject) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HivePrivilegeObject.java Thu Jul 24 22:08:23 2014
@@ -17,14 +17,13 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
+import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience.LimitedPrivate;
 import org.apache.hadoop.hive.common.classification.InterfaceStability.Unstable;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-
 /**
  * Represents the object on which privilege is being granted/revoked
  */
@@ -33,42 +32,15 @@ import java.util.Arrays;
 public class HivePrivilegeObject implements Comparable<HivePrivilegeObject> {
 
   @Override
-  public String toString() {
-    String name = null;
-    switch (type) {
-    case DATABASE:
-      name = dbname;
-      break;
-    case TABLE_OR_VIEW:
-    case PARTITION:
-      name = (dbname == null ? "" : dbname + ".") + tableviewname;
-      if (partKeys != null) {
-        name += partKeys.toString();
-      }
-      break;
-    case COLUMN:
-    case LOCAL_URI:
-    case DFS_URI:
-      name = tableviewname;
-      break;
-    case COMMAND_PARAMS:
-      name = commandParams.toString();
-      break;
-    }
-    return "Object [type=" + type + ", name=" + name + "]";
-
-  }
-
-  @Override
   public int compareTo(HivePrivilegeObject o) {
     int compare = type.compareTo(o.type);
     if (compare == 0) {
       compare = dbname.compareTo(o.dbname);
     }
     if (compare == 0) {
-      compare = tableviewname != null ?
-          (o.tableviewname != null ? tableviewname.compareTo(o.tableviewname) : 1) :
-          (o.tableviewname != null ? -1 : 0);
+      compare = objectName != null ?
+          (o.objectName != null ? objectName.compareTo(o.objectName) : 1) :
+          (o.objectName != null ? -1 : 0);
     }
     if (compare == 0) {
       compare = partKeys != null ?
@@ -94,7 +66,7 @@ public class HivePrivilegeObject impleme
   }
 
   public enum HivePrivilegeObjectType {
-    GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS
+    GLOBAL, DATABASE, TABLE_OR_VIEW, PARTITION, COLUMN, LOCAL_URI, DFS_URI, COMMAND_PARAMS, FUNCTION
   } ;
   public enum HivePrivObjectActionType {
     OTHER, INSERT, INSERT_OVERWRITE
@@ -102,26 +74,27 @@ public class HivePrivilegeObject impleme
 
   private final HivePrivilegeObjectType type;
   private final String dbname;
-  private final String tableviewname;
+  private final String objectName;
   private final List<String> commandParams;
   private final List<String> partKeys;
   private final List<String> columns;
   private final HivePrivObjectActionType actionType;
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI) {
-    this(type, dbname, tableViewURI, HivePrivObjectActionType.OTHER);
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName) {
+    this(type, dbname, objectName, HivePrivObjectActionType.OTHER);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName
       , HivePrivObjectActionType actionType) {
-    this(type, dbname, tableViewURI, null, null, actionType, null);
+    this(type, dbname, objectName, null, null, actionType, null);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
       List<String> partKeys, String column) {
-    this(type, dbname, tableViewURI, partKeys,
+    this(type, dbname, objectName, partKeys,
         column == null ? null : new ArrayList<String>(Arrays.asList(column)),
         HivePrivObjectActionType.OTHER, null);
+
   }
 
   /**
@@ -134,17 +107,17 @@ public class HivePrivilegeObject impleme
         cmdParams);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
     List<String> partKeys, List<String> columns, List<String> commandParams) {
-    this(type, dbname, tableViewURI, partKeys, columns, HivePrivObjectActionType.OTHER, commandParams);
+    this(type, dbname, objectName, partKeys, columns, HivePrivObjectActionType.OTHER, commandParams);
   }
 
-  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String tableViewURI,
+  public HivePrivilegeObject(HivePrivilegeObjectType type, String dbname, String objectName,
       List<String> partKeys, List<String> columns, HivePrivObjectActionType actionType,
       List<String> commandParams) {
     this.type = type;
     this.dbname = dbname;
-    this.tableviewname = tableViewURI;
+    this.objectName = objectName;
     this.partKeys = partKeys;
     this.columns = columns;
     this.actionType = actionType;
@@ -159,8 +132,11 @@ public class HivePrivilegeObject impleme
     return dbname;
   }
 
-  public String getTableViewURI() {
-    return tableviewname;
+  /**
+   * @return name of table/view/uri/function name
+   */
+  public String getObjectName() {
+    return objectName;
   }
 
   public HivePrivObjectActionType getActionType() {
@@ -178,4 +154,50 @@ public class HivePrivilegeObject impleme
   public List<String> getColumns() {
     return columns;
   }
+
+  @Override
+  public String toString() {
+    String name = null;
+    switch (type) {
+    case DATABASE:
+      name = dbname;
+      break;
+    case TABLE_OR_VIEW:
+    case PARTITION:
+      name = getDbObjectName(dbname, objectName);
+      if (partKeys != null) {
+        name += partKeys.toString();
+      }
+      break;
+    case FUNCTION:
+      name = getDbObjectName(dbname, objectName);
+      break;
+    case COLUMN:
+    case LOCAL_URI:
+    case DFS_URI:
+      name = objectName;
+      break;
+    case COMMAND_PARAMS:
+      name = commandParams.toString();
+      break;
+    }
+
+    // get the string representing the action type if it's a non-default action type
+    String actionTypeStr ="";
+    if (actionType != null) {
+      switch (actionType) {
+      case INSERT:
+      case INSERT_OVERWRITE:
+        actionTypeStr = ", action=" + actionType;
+      default:
+      }
+    }
+
+    return "Object [type=" + type + ", name=" + name + actionTypeStr + "]";
+  }
+
+  private String getDbObjectName(String dbname2, String objectName2) {
+    return (dbname == null ? "" : dbname + ".") + objectName;
+  }
+
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Thu Jul 24 22:08:23 2014
@@ -18,6 +18,10 @@
 
 package org.apache.hadoop.hive.ql.security.authorization.plugin;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -38,10 +42,6 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAccessController;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
 public class HiveV1Authorizer implements HiveAuthorizer {
 
   private final HiveConf conf;
@@ -141,8 +141,8 @@ public class HiveV1Authorizer implements
       throw new HiveException("Database " + privObject.getDbname() + " does not exists");
     }
     Table tableObj = null;
-    if (privObject.getTableViewURI() != null) {
-      tableObj = hive.getTable(dbObj.getName(), privObject.getTableViewURI());
+    if (privObject.getObjectName() != null) {
+      tableObj = hive.getTable(dbObj.getName(), privObject.getObjectName());
     }
 
     List<String> partValues = null;
@@ -308,8 +308,8 @@ public class HiveV1Authorizer implements
           throw new HiveException("Database " + privObj.getDbname() + " does not exists");
         }
         Table tableObj = null;
-        if (privObj.getTableViewURI() != null) {
-          tableObj = hive.getTable(dbObj.getName(), privObj.getTableViewURI());
+        if (privObj.getObjectName() != null) {
+          tableObj = hive.getTable(dbObj.getName(), privObj.getObjectName());
         }
         List<String> partValues = privObj.getPartKeys();
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/GrantPrivAuthUtils.java Thu Jul 24 22:08:23 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
@@ -64,9 +65,11 @@ public class GrantPrivAuthUtils {
         metastoreClient, userName, hivePrivObject, curRoles, isAdmin);
 
     // check if required privileges is subset of available privileges
+    List<String> deniedMessages = new ArrayList<String>();
     Collection<SQLPrivTypeGrant> missingPrivs = reqPrivileges.findMissingPrivs(availPrivs);
-    SQLAuthorizationUtils.assertNoMissingPrivilege(missingPrivs, new HivePrincipal(userName,
-        HivePrincipalType.USER), hivePrivObject, opType);
+    SQLAuthorizationUtils.addMissingPrivMsg(missingPrivs, hivePrivObject, deniedMessages);
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), opType, deniedMessages);
   }
 
   private static RequiredPrivileges getGrantRequiredPrivileges(List<HivePrivilege> hivePrivileges)

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLAuthorizationUtils.java Thu Jul 24 22:08:23 2014
@@ -264,7 +264,7 @@ public class SQLAuthorizationUtils {
       Table thriftTableObj = null;
       try {
         thriftTableObj = metastoreClient.getTable(hivePrivObject.getDbname(),
-            hivePrivObject.getTableViewURI());
+            hivePrivObject.getObjectName());
       } catch (Exception e) {
         throwGetObjErr(e, hivePrivObject);
       }
@@ -352,19 +352,15 @@ public class SQLAuthorizationUtils {
     }
   }
 
-  public static void assertNoMissingPrivilege(Collection<SQLPrivTypeGrant> missingPrivs,
-      HivePrincipal hivePrincipal, HivePrivilegeObject hivePrivObject, HiveOperationType opType)
-      throws HiveAccessControlException {
+  public static void addMissingPrivMsg(Collection<SQLPrivTypeGrant> missingPrivs,
+      HivePrivilegeObject hivePrivObject, List<String> deniedMessages) {
     if (missingPrivs.size() != 0) {
       // there are some required privileges missing, create error message
       // sort the privileges so that error message is deterministic (for tests)
       List<SQLPrivTypeGrant> sortedmissingPrivs = new ArrayList<SQLPrivTypeGrant>(missingPrivs);
       Collections.sort(sortedmissingPrivs);
-
-      String errMsg = "Permission denied. " + hivePrincipal
-          + " does not have following privileges on " + hivePrivObject +
-          " for operation " + opType + " : " + sortedmissingPrivs;
-      throw new HiveAccessControlException(errMsg.toString());
+      String errMsg = sortedmissingPrivs + " on " + hivePrivObject;
+      deniedMessages.add(errMsg);
     }
   }
 
@@ -405,5 +401,16 @@ public class SQLAuthorizationUtils {
     return availPrivs;
   }
 
+  public static void assertNoDeniedPermissions(HivePrincipal hivePrincipal,
+      HiveOperationType hiveOpType, List<String> deniedMessages) throws HiveAccessControlException {
+    if (deniedMessages.size() != 0) {
+      Collections.sort(deniedMessages);
+      String errorMessage = "Permission denied: " + hivePrincipal
+          + " does not have following privileges for operation " + hiveOpType + " "
+          + deniedMessages;
+      throw new HiveAccessControlException(errorMessage);
+    }
+  }
+
 
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAccessController.java Thu Jul 24 22:08:23 2014
@@ -420,7 +420,7 @@ public class SQLStdHiveAccessController 
         }
 
         HivePrivilegeObject resPrivObj = new HivePrivilegeObject(
-            getPluginObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
+            getPluginPrivilegeObjType(msObjRef.getObjectType()), msObjRef.getDbName(),
             msObjRef.getObjectName(), msObjRef.getPartValues(), msObjRef.getColumnName());
 
         // result grantor principal
@@ -479,8 +479,14 @@ public class SQLStdHiveAccessController 
     return false;
   }
 
-  private HivePrivilegeObjectType getPluginObjType(HiveObjectType objectType)
-      throws HiveAuthzPluginException {
+  /**
+   * Convert metastore object type to HivePrivilegeObjectType.
+   * Also verifies that metastore object type is of a type on which metastore privileges are
+   * supported by sql std auth.
+   * @param objectType
+   * @return corresponding HivePrivilegeObjectType
+   */
+  private HivePrivilegeObjectType getPluginPrivilegeObjType(HiveObjectType objectType) {
     switch (objectType) {
     case DATABASE:
       return HivePrivilegeObjectType.DATABASE;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Thu Jul 24 22:08:23 2014
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
@@ -71,13 +72,16 @@ public class SQLStdHiveAuthorizationVali
     IMetaStoreClient metastoreClient = metastoreClientFactory.getHiveMetastoreClient();
 
     // check privileges on input and output objects
-    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, IOType.INPUT);
-    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, IOType.OUTPUT);
+    List<String> deniedMessages = new ArrayList<String>();
+    checkPrivileges(hiveOpType, inputHObjs, metastoreClient, userName, IOType.INPUT, deniedMessages);
+    checkPrivileges(hiveOpType, outputHObjs, metastoreClient, userName, IOType.OUTPUT, deniedMessages);
 
+    SQLAuthorizationUtils.assertNoDeniedPermissions(new HivePrincipal(userName,
+        HivePrincipalType.USER), hiveOpType, deniedMessages);
   }
 
   private void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> hiveObjects,
-      IMetaStoreClient metastoreClient, String userName, IOType ioType)
+      IMetaStoreClient metastoreClient, String userName, IOType ioType, List<String> deniedMessages)
       throws HiveAuthzPluginException, HiveAccessControlException {
 
     if (hiveObjects == null) {
@@ -95,7 +99,7 @@ public class SQLStdHiveAuthorizationVali
       switch (hiveObj.getType()) {
       case LOCAL_URI:
       case DFS_URI:
-        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new Path(hiveObj.getTableViewURI()),
+        availPrivs = SQLAuthorizationUtils.getPrivilegesFromFS(new Path(hiveObj.getObjectName()),
             conf, userName);
         break;
       case PARTITION:
@@ -104,9 +108,9 @@ public class SQLStdHiveAuthorizationVali
         // ignore partitions
         continue;
       case COMMAND_PARAMS:
-        // operations that have objects of type COMMAND_PARAMS are authorized
+      case FUNCTION:
+        // operations that have objects of type COMMAND_PARAMS, FUNCTION are authorized
         // solely on the type
-        // Assume no available privileges, unless in admin role
         if (privController.isUserAdmin()) {
           availPrivs.addPrivilege(SQLPrivTypeGrant.ADMIN_PRIV);
         }
@@ -118,8 +122,7 @@ public class SQLStdHiveAuthorizationVali
 
       // Verify that there are no missing privileges
       Collection<SQLPrivTypeGrant> missingPriv = requiredPrivs.findMissingPrivs(availPrivs);
-      SQLAuthorizationUtils.assertNoMissingPrivilege(missingPriv, new HivePrincipal(userName,
-          HivePrincipalType.USER), hiveObj, hiveOpType);
+      SQLAuthorizationUtils.addMissingPrivMsg(missingPriv, hiveObj, deniedMessages);
 
     }
   }

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_addjar.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_addjar.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_addjar.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_addjar.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-Query returned non-zero code: 1, cause: Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=COMMAND_PARAMS, name=[jar, dummy.jar]] for operation ADD : [ADMIN PRIVILEGE]
+Query returned non-zero code: 1, cause: Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation ADD [[ADMIN PRIVILEGE] on Object [type=COMMAND_PARAMS, name=[jar, dummy.jar]]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_addpartition.q.out Thu Jul 24 22:08:23 2014
@@ -7,4 +7,4 @@ create table tpart(i int, j int) partiti
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@tpart
-FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.tpart] for operation ALTERTABLE_ADDPARTS : [INSERT]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation ALTERTABLE_ADDPARTS [[INSERT] on Object [type=TABLE_OR_VIEW, name=default.tpart]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner.q.out Thu Jul 24 22:08:23 2014
@@ -6,4 +6,4 @@ PREHOOK: type: CREATEDATABASE
 
 create database dbao
 POSTHOOK: type: CREATEDATABASE
-FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=DATABASE, name=dbao] for operation ALTERDATABASE_OWNER : [ADMIN PRIVILEGE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation ALTERDATABASE_OWNER [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=dbao]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner_default.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner_default.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner_default.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_alter_db_owner_default.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=DATABASE, name=default] for operation ALTERDATABASE_OWNER : [ADMIN PRIVILEGE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation ALTERDATABASE_OWNER [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_compile.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_compile.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_compile.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_compile.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-Query returned non-zero code: 1, cause: Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=COMMAND_PARAMS, name=[`dummy code ` AS groovy NAMED something.groovy]] for operation COMPILE : [ADMIN PRIVILEGE]
+Query returned non-zero code: 1, cause: Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation COMPILE [[ADMIN PRIVILEGE] on Object [type=COMMAND_PARAMS, name=[`dummy code ` AS groovy NAMED something.groovy]]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_create_func1.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=DATABASE, name=default] for operation CREATEFUNCTION : [ADMIN PRIVILEGE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default], [ADMIN PRIVILEGE] on Object [type=FUNCTION, name=perm_fn]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_create_func2.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_create_func2.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_create_func2.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_create_func2.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=DATABASE, name=default] for operation CREATEFUNCTION : [ADMIN PRIVILEGE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEFUNCTION [[ADMIN PRIVILEGE] on Object [type=FUNCTION, name=temp_fn]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_create_macro1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_create_macro1.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_create_macro1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_create_macro1.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-FAILED: HiveAccessControlException Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=DATABASE, name=default] for operation CREATEMACRO : [ADMIN PRIVILEGE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation CREATEMACRO [[ADMIN PRIVILEGE] on Object [type=DATABASE, name=default]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_createview.q.out Thu Jul 24 22:08:23 2014
@@ -7,4 +7,4 @@ create table t1(i int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
-FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] for operation CREATEVIEW : [SELECT with grant]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation CREATEVIEW [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.t1]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_ctas.q.out Thu Jul 24 22:08:23 2014
@@ -7,4 +7,4 @@ create table t1(i int)
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: database:default
 POSTHOOK: Output: default@t1
-FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] for operation CREATETABLE_AS_SELECT : [SELECT]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation CREATETABLE_AS_SELECT [[SELECT] on Object [type=TABLE_OR_VIEW, name=default.t1]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_deletejar.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_deletejar.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_deletejar.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_deletejar.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-Query returned non-zero code: 1, cause: Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=COMMAND_PARAMS, name=[jar, dummy.jar]] for operation DELETE : [ADMIN PRIVILEGE]
+Query returned non-zero code: 1, cause: Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation DELETE [[ADMIN PRIVILEGE] on Object [type=COMMAND_PARAMS, name=[jar, dummy.jar]]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_desc_table_nosel.q.out Thu Jul 24 22:08:23 2014
@@ -26,4 +26,4 @@ PREHOOK: Output: default@t1
 POSTHOOK: query: revoke select on table t1 from user user2
 POSTHOOK: type: REVOKE_PRIVILEGE
 POSTHOOK: Output: default@t1
-FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.t1] for operation DESCTABLE : [SELECT]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation DESCTABLE [[SELECT] on Object [type=TABLE_OR_VIEW, name=default.t1]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_dfs.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_dfs.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_dfs.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_dfs.q.out Thu Jul 24 22:08:23 2014
@@ -1 +1 @@
-Query returned non-zero code: 1, cause: Permission denied. Principal [name=hive_test_user, type=USER] does not have following privileges on Object [type=COMMAND_PARAMS, name=[-ls, dummy_file]] for operation DFS : [ADMIN PRIVILEGE]
+Query returned non-zero code: 1, cause: Permission denied: Principal [name=hive_test_user, type=USER] does not have following privileges for operation DFS [[ADMIN PRIVILEGE] on Object [type=COMMAND_PARAMS, name=[-ls, dummy_file]]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_cascade.q.out Thu Jul 24 22:08:23 2014
@@ -50,4 +50,4 @@ POSTHOOK: query: show current roles
 POSTHOOK: type: SHOW_ROLES
 public
 
-FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=dba2.tab2] for operation DROPDATABASE : [OBJECT OWNERSHIP]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation DROPDATABASE [[OBJECT OWNERSHIP] on Object [type=TABLE_OR_VIEW, name=dba2.tab2]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_drop_db_empty.q.out Thu Jul 24 22:08:23 2014
@@ -46,4 +46,4 @@ POSTHOOK: query: show current roles
 POSTHOOK: type: SHOW_ROLES
 public
 
-FAILED: HiveAccessControlException Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=DATABASE, name=dba2] for operation DROPDATABASE : [OBJECT OWNERSHIP]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation DROPDATABASE [[OBJECT OWNERSHIP] on Object [type=DATABASE, name=dba2]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_droppartition.q.out Thu Jul 24 22:08:23 2014
@@ -16,4 +16,4 @@ POSTHOOK: type: ALTERTABLE_ADDPARTS
 #### A masked pattern was here ####
 POSTHOOK: Output: default@tpart
 POSTHOOK: Output: default@tpart@k=abc
-FAILED: HiveAccessControlException Permission denied. Principal [name=user1, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.tpart] for operation ALTERTABLE_DROPPARTS : [DELETE]
+FAILED: HiveAccessControlException Permission denied: Principal [name=user1, type=USER] does not have following privileges for operation ALTERTABLE_DROPPARTS [[DELETE] on Object [type=TABLE_OR_VIEW, name=default.tpart]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_fail_8.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_fail_8.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_fail_8.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_fail_8.q.out Thu Jul 24 22:08:23 2014
@@ -45,4 +45,4 @@ PREHOOK: query: -- Now that grant option
 GRANT SELECT ON authorization_fail TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@authorization_fail
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.authorization_fail] for operation GRANT_PRIVILEGE : [SELECT with grant]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant] on Object [type=TABLE_OR_VIEW, name=default.authorization_fail]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_allpriv.q.out Thu Jul 24 22:08:23 2014
@@ -21,4 +21,4 @@ PREHOOK: query: -- try grant all to user
 GRANT ALL ON table_priv_allf TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_allf
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf] for operation GRANT_PRIVILEGE : [SELECT with grant, UPDATE with grant, DELETE with grant]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[SELECT with grant, UPDATE with grant, DELETE with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_allf]]

Modified: hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out?rev=1613310&r1=1613309&r2=1613310&view=diff
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out (original)
+++ hive/trunk/ql/src/test/results/clientnegative/authorization_grant_table_fail1.q.out Thu Jul 24 22:08:23 2014
@@ -13,4 +13,4 @@ PREHOOK: query: -- try grant insert to u
 GRANT INSERT ON table_priv_gfail1 TO USER user3
 PREHOOK: type: GRANT_PRIVILEGE
 PREHOOK: Output: default@table_priv_gfail1
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied. Principal [name=user2, type=USER] does not have following privileges on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1] for operation GRANT_PRIVILEGE : [INSERT with grant]
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Permission denied: Principal [name=user2, type=USER] does not have following privileges for operation GRANT_PRIVILEGE [[INSERT with grant] on Object [type=TABLE_OR_VIEW, name=default.table_priv_gfail1]]



Mime
View raw message