incubator-hcatalog-commits mailing list archives

From tra...@apache.org
Subject svn commit: r1383152 [6/27] - in /incubator/hcatalog/trunk: ./ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/ hcatalog-pig-adapter/src/main/java/org/apache/hcatalog/pig/drivers/ hcatalog-pig-adapter/src/test/java/org/apache/hcatalog/pig/ s...
Date Mon, 10 Sep 2012 23:29:03 GMT
Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java Mon Sep 10 23:28:55 2012
@@ -53,278 +53,278 @@ import org.apache.hcatalog.common.HCatUt
 
 public class HCatCli {
 
-  @SuppressWarnings("static-access")
-  public static void main(String[] args) {
+    @SuppressWarnings("static-access")
+    public static void main(String[] args) {
 
-    try {
-      LogUtils.initHiveLog4j();
-    } catch (LogInitializationException e) {
+        try {
+            LogUtils.initHiveLog4j();
+        } catch (LogInitializationException e) {
 
-    }
+        }
 
-    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
-    ss.in = System.in;
-    try {
-      ss.out = new PrintStream(System.out, true, "UTF-8");
-      ss.err = new PrintStream(System.err, true, "UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      System.exit(1);
-    }
+        CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
+        ss.in = System.in;
+        try {
+            ss.out = new PrintStream(System.out, true, "UTF-8");
+            ss.err = new PrintStream(System.err, true, "UTF-8");
+        } catch (UnsupportedEncodingException e) {
+            System.exit(1);
+        }
 
-    HiveConf conf = ss.getConf();
+        HiveConf conf = ss.getConf();
 
-    HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
+        HiveConf.setVar(conf, ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
 
-    SessionState.start(ss);
+        SessionState.start(ss);
 
-    Options options = new Options();
+        Options options = new Options();
 
-    // -e 'quoted-query-string'
-    options.addOption(OptionBuilder
-        .hasArg()
-        .withArgName("exec")
-        .withDescription("hcat command given from command line")
-        .create('e'));
-
-    // -f <query-file>
-    options.addOption(OptionBuilder
-        .hasArg()
-        .withArgName("file")
-        .withDescription("hcat commands in file")
-        .create('f'));
-
-    // -g
-    options.addOption(OptionBuilder
-        .hasArg().
-        withArgName("group").
-        withDescription("group for the db/table specified in CREATE statement").
-        create('g'));
-
-    // -p
-    options.addOption(OptionBuilder
-        .hasArg()
-        .withArgName("perms")
-        .withDescription("permissions for the db/table specified in CREATE statement")
-        .create('p'));
-
-    // -D
-    options.addOption(OptionBuilder
-        .hasArgs(2)
-        .withArgName("property=value")
-        .withValueSeparator()
-        .withDescription("use hadoop value for given property")
-        .create('D'));
-
-    // [-h|--help]
-    options.addOption(new Option("h", "help", false, "Print help information"));
-
-    Parser parser = new GnuParser();
-    CommandLine cmdLine = null;
-
-    try {
-      cmdLine  = parser.parse(options,args);
-
-    } catch (ParseException e) {
-      printUsage(options, ss.err);
-      System.exit(1);
-    }
-    // -e
-    String execString = (String) cmdLine.getOptionValue('e');
-    // -f
-    String fileName = (String) cmdLine.getOptionValue('f');
-    // -h
-    if (cmdLine.hasOption('h')) {
-      printUsage(options,ss.out);
-      System.exit(0);
-    }
+        // -e 'quoted-query-string'
+        options.addOption(OptionBuilder
+                .hasArg()
+                .withArgName("exec")
+                .withDescription("hcat command given from command line")
+                .create('e'));
+
+        // -f <query-file>
+        options.addOption(OptionBuilder
+                .hasArg()
+                .withArgName("file")
+                .withDescription("hcat commands in file")
+                .create('f'));
+
+        // -g
+        options.addOption(OptionBuilder
+                .hasArg().
+                withArgName("group").
+                withDescription("group for the db/table specified in CREATE statement").
+                create('g'));
+
+        // -p
+        options.addOption(OptionBuilder
+                .hasArg()
+                .withArgName("perms")
+                .withDescription("permissions for the db/table specified in CREATE statement")
+                .create('p'));
+
+        // -D
+        options.addOption(OptionBuilder
+                .hasArgs(2)
+                .withArgName("property=value")
+                .withValueSeparator()
+                .withDescription("use hadoop value for given property")
+                .create('D'));
+
+        // [-h|--help]
+        options.addOption(new Option("h", "help", false, "Print help information"));
+
+        Parser parser = new GnuParser();
+        CommandLine cmdLine = null;
+
+        try {
+            cmdLine = parser.parse(options, args);
+
+        } catch (ParseException e) {
+            printUsage(options, ss.err);
+            System.exit(1);
+        }
+        // -e
+        String execString = (String) cmdLine.getOptionValue('e');
+        // -f
+        String fileName = (String) cmdLine.getOptionValue('f');
+        // -h
+        if (cmdLine.hasOption('h')) {
+            printUsage(options, ss.out);
+            System.exit(0);
+        }
 
-    if (execString != null && fileName != null) {
-      ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
-      printUsage(options,ss.err);
-      System.exit(1);
-    }
+        if (execString != null && fileName != null) {
+            ss.err.println("The '-e' and '-f' options cannot be specified simultaneously");
+            printUsage(options, ss.err);
+            System.exit(1);
+        }
 
-    // -p
-    String perms = (String)cmdLine.getOptionValue('p');
-    if(perms != null){
-      validatePermissions(ss, conf, perms);
-    }
+        // -p
+        String perms = (String) cmdLine.getOptionValue('p');
+        if (perms != null) {
+            validatePermissions(ss, conf, perms);
+        }
 
-    // -g
-    String grp = (String) cmdLine.getOptionValue('g');
-    if(grp != null){
-      conf.set(HCatConstants.HCAT_GROUP, grp);
-    }
+        // -g
+        String grp = (String) cmdLine.getOptionValue('g');
+        if (grp != null) {
+            conf.set(HCatConstants.HCAT_GROUP, grp);
+        }
 
-    // -D
-    setConfProperties(conf, cmdLine.getOptionProperties("D"));
+        // -D
+        setConfProperties(conf, cmdLine.getOptionProperties("D"));
 
-    if (execString != null) {
-      System.exit(processLine(execString));
-    }
+        if (execString != null) {
+            System.exit(processLine(execString));
+        }
 
-    try {
-      if (fileName != null) {
-        System.exit(processFile(fileName));
-      }
-    } catch (FileNotFoundException e) {
-      ss.err.println("Input file not found. (" + e.getMessage() + ")");
-      System.exit(1);
-    } catch (IOException e) {
-      ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
-      System.exit(1);
-    }
+        try {
+            if (fileName != null) {
+                System.exit(processFile(fileName));
+            }
+        } catch (FileNotFoundException e) {
+            ss.err.println("Input file not found. (" + e.getMessage() + ")");
+            System.exit(1);
+        } catch (IOException e) {
+            ss.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
+            System.exit(1);
+        }
 
-    // -h
-    printUsage(options, ss.err);
-    System.exit(1);
-  }
-
-  private static void setConfProperties(HiveConf conf, Properties props) {
-    for(java.util.Map.Entry<Object, Object> e : props.entrySet())
-        conf.set((String) e.getKey(), (String) e.getValue());
-  }
-
-  private static int processLine(String line) {
-    int ret = 0;
-
-    String command = "";
-    for (String oneCmd : line.split(";")) {
-
-      if (StringUtils.endsWith(oneCmd, "\\")) {
-        command += StringUtils.chop(oneCmd) + ";";
-        continue;
-      } else {
-        command += oneCmd;
-      }
-      if (StringUtils.isBlank(command)) {
-        continue;
-      }
+        // -h
+        printUsage(options, ss.err);
+        System.exit(1);
+    }
+
+    private static void setConfProperties(HiveConf conf, Properties props) {
+        for (java.util.Map.Entry<Object, Object> e : props.entrySet())
+            conf.set((String) e.getKey(), (String) e.getValue());
+    }
+
+    private static int processLine(String line) {
+        int ret = 0;
+
+        String command = "";
+        for (String oneCmd : line.split(";")) {
+
+            if (StringUtils.endsWith(oneCmd, "\\")) {
+                command += StringUtils.chop(oneCmd) + ";";
+                continue;
+            } else {
+                command += oneCmd;
+            }
+            if (StringUtils.isBlank(command)) {
+                continue;
+            }
 
-      ret = processCmd(command);
-      command = "";
+            ret = processCmd(command);
+            command = "";
+        }
+        return ret;
     }
-    return ret;
-  }
 
-  private static int processFile(String fileName) throws IOException {
-    FileReader fileReader = null;
-    BufferedReader reader = null;
-    try {
-      fileReader = new FileReader(fileName);
-      reader = new BufferedReader(fileReader);
-      String line;
-      StringBuilder qsb = new StringBuilder();
-
-      while ((line = reader.readLine()) != null) {
-        qsb.append(line + "\n");
-      }
-
-      return (processLine(qsb.toString()));
-    } finally {
-      if (fileReader != null) {
-        fileReader.close();
-      }
-      if(reader != null) {
-        reader.close();
-      }
+    private static int processFile(String fileName) throws IOException {
+        FileReader fileReader = null;
+        BufferedReader reader = null;
+        try {
+            fileReader = new FileReader(fileName);
+            reader = new BufferedReader(fileReader);
+            String line;
+            StringBuilder qsb = new StringBuilder();
+
+            while ((line = reader.readLine()) != null) {
+                qsb.append(line + "\n");
+            }
+
+            return (processLine(qsb.toString()));
+        } finally {
+            if (fileReader != null) {
+                fileReader.close();
+            }
+            if (reader != null) {
+                reader.close();
+            }
+        }
     }
-  }
 
-  private static int processCmd(String cmd){
+    private static int processCmd(String cmd) {
 
-    SessionState ss = SessionState.get();
-    long start = System.currentTimeMillis();
+        SessionState ss = SessionState.get();
+        long start = System.currentTimeMillis();
 
-    cmd = cmd.trim();
-    String firstToken = cmd.split("\\s+")[0].trim();
+        cmd = cmd.trim();
+        String firstToken = cmd.split("\\s+")[0].trim();
 
-    if(firstToken.equalsIgnoreCase("set")){
-      return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
-    } else if (firstToken.equalsIgnoreCase("dfs")){
-      return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
-    }
+        if (firstToken.equalsIgnoreCase("set")) {
+            return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
+        } else if (firstToken.equalsIgnoreCase("dfs")) {
+            return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
+        }
 
-    HCatDriver driver = new HCatDriver();
+        HCatDriver driver = new HCatDriver();
 
-    int ret = driver.run(cmd).getResponseCode();
+        int ret = driver.run(cmd).getResponseCode();
 
-    if (ret != 0) {
-      driver.close();
-      System.exit(ret);
-    }
+        if (ret != 0) {
+            driver.close();
+            System.exit(ret);
+        }
 
-    ArrayList<String> res = new ArrayList<String>();
-    try {
-      while (driver.getResults(res)) {
-        for (String r : res) {
-          ss.out.println(r);
-        }
-        res.clear();
-      }
-    } catch (IOException e) {
-      ss.err.println("Failed with exception " + e.getClass().getName() + ":"
-          + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
-      ret = 1;
-    } catch (CommandNeedRetryException e) {
-        ss.err.println("Failed with exception " + e.getClass().getName() + ":"
+        ArrayList<String> res = new ArrayList<String>();
+        try {
+            while (driver.getResults(res)) {
+                for (String r : res) {
+                    ss.out.println(r);
+                }
+                res.clear();
+            }
+        } catch (IOException e) {
+            ss.err.println("Failed with exception " + e.getClass().getName() + ":"
                 + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
             ret = 1;
-    }
+        } catch (CommandNeedRetryException e) {
+            ss.err.println("Failed with exception " + e.getClass().getName() + ":"
+                + e.getMessage() + "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
+            ret = 1;
+        }
 
-    int cret = driver.close();
-    if (ret == 0) {
-      ret = cret;
-    }
+        int cret = driver.close();
+        if (ret == 0) {
+            ret = cret;
+        }
 
-    long end = System.currentTimeMillis();
-    if (end > start) {
-      double timeTaken = (end - start) / 1000.0;
-      ss.err.println("Time taken: " + timeTaken + " seconds");
+        long end = System.currentTimeMillis();
+        if (end > start) {
+            double timeTaken = (end - start) / 1000.0;
+            ss.err.println("Time taken: " + timeTaken + " seconds");
+        }
+        return ret;
     }
-    return ret;
-  }
 
-  private static void printUsage(Options options, OutputStream os) {
-    PrintWriter pw = new PrintWriter(os);
-    new HelpFormatter().printHelp(pw, 2 * HelpFormatter.DEFAULT_WIDTH,
-      "hcat { -e \"<query>\" | -f \"<filepath>\" } [ -g \"<group>\" ] [ -p \"<perms>\" ] [ -D\"<name>=<value>\" ]",
-        null,options, HelpFormatter.DEFAULT_LEFT_PAD,HelpFormatter.DEFAULT_DESC_PAD,
-        null, false);
-    pw.flush();
-  }
-
-  private static void validatePermissions(CliSessionState ss, HiveConf conf, String perms) {
-    perms = perms.trim();
-    FsPermission fp = null;
-
-    if (perms.matches("^\\s*([r,w,x,-]{9})\\s*$")){
-      fp = FsPermission.valueOf("d"+perms);
-    } else if (perms.matches("^\\s*([0-7]{3})\\s*$")){
-      fp = new FsPermission(Short.decode("0"+perms));
-    } else {
-      ss.err.println("Invalid permission specification: "+perms);
-      System.exit(1);
-    }
+    private static void printUsage(Options options, OutputStream os) {
+        PrintWriter pw = new PrintWriter(os);
+        new HelpFormatter().printHelp(pw, 2 * HelpFormatter.DEFAULT_WIDTH,
+            "hcat { -e \"<query>\" | -f \"<filepath>\" } [ -g \"<group>\" ] [ -p \"<perms>\" ] [ -D\"<name>=<value>\" ]",
+            null, options, HelpFormatter.DEFAULT_LEFT_PAD, HelpFormatter.DEFAULT_DESC_PAD,
+            null, false);
+        pw.flush();
+    }
+
+    private static void validatePermissions(CliSessionState ss, HiveConf conf, String perms) {
+        perms = perms.trim();
+        FsPermission fp = null;
+
+        if (perms.matches("^\\s*([r,w,x,-]{9})\\s*$")) {
+            fp = FsPermission.valueOf("d" + perms);
+        } else if (perms.matches("^\\s*([0-7]{3})\\s*$")) {
+            fp = new FsPermission(Short.decode("0" + perms));
+        } else {
+            ss.err.println("Invalid permission specification: " + perms);
+            System.exit(1);
+        }
 
-    if (!HCatUtil.validateMorePermissive(fp.getUserAction(),fp.getGroupAction())){
-      ss.err.println("Invalid permission specification: "+perms+" : user permissions must be more permissive than group permission ");
-      System.exit(1);
-    }
-    if (!HCatUtil.validateMorePermissive(fp.getGroupAction(),fp.getOtherAction())){
-      ss.err.println("Invalid permission specification: "+perms+" : group permissions must be more permissive than other permission ");
-      System.exit(1);
-    }
-    if ( (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getUserAction())) ||
-        (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getGroupAction())) ||
-        (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getOtherAction())) ) {
-      ss.err.println("Invalid permission specification: "+perms+" : permissions must have execute permissions if read or write permissions are specified ");
-      System.exit(1);
-    }
+        if (!HCatUtil.validateMorePermissive(fp.getUserAction(), fp.getGroupAction())) {
+            ss.err.println("Invalid permission specification: " + perms + " : user permissions must be more permissive than group permission ");
+            System.exit(1);
+        }
+        if (!HCatUtil.validateMorePermissive(fp.getGroupAction(), fp.getOtherAction())) {
+            ss.err.println("Invalid permission specification: " + perms + " : group permissions must be more permissive than other permission ");
+            System.exit(1);
+        }
+        if ((!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getUserAction())) ||
+            (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getGroupAction())) ||
+            (!HCatUtil.validateExecuteBitPresentIfReadOrWrite(fp.getOtherAction()))) {
+            ss.err.println("Invalid permission specification: " + perms + " : permissions must have execute permissions if read or write permissions are specified ");
+            System.exit(1);
+        }
 
-    conf.set(HCatConstants.HCAT_PERMS, "d"+fp.toString());
+        conf.set(HCatConstants.HCAT_PERMS, "d" + fp.toString());
 
-  }
+    }
 
 
 }
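
[Editor's note, for reference: the permission handling in validatePermissions() above accepts either a nine-character symbolic string (e.g. rwxr-xr-x) or a three-digit octal string (e.g. 755), and converts it to an FsPermission by prefixing "d" for the symbolic form or "0" for the octal form before the permissiveness checks. A minimal standalone sketch of just that conversion, assuming only hadoop-common on the classpath; the class name and the error handling here are illustrative, not part of the commit:]

    import org.apache.hadoop.fs.permission.FsPermission;

    public class PermsParseSketch {
        // Mirrors the two accepted forms in HCatCli.validatePermissions().
        static FsPermission parse(String perms) {
            perms = perms.trim();
            if (perms.matches("^\\s*([r,w,x,-]{9})\\s*$")) {
                // Symbolic form: prefix "d" so FsPermission.valueOf sees a 10-char unix string.
                return FsPermission.valueOf("d" + perms);
            } else if (perms.matches("^\\s*([0-7]{3})\\s*$")) {
                // Octal form: prefix "0" so Short.decode parses it as octal.
                return new FsPermission(Short.decode("0" + perms));
            }
            throw new IllegalArgumentException("Invalid permission specification: " + perms);
        }

        public static void main(String[] args) {
            System.out.println(parse("rwxr-xr-x")); // prints rwxr-xr-x
            System.out.println(parse("755"));       // prints rwxr-xr-x
        }
    }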

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatDriver.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatDriver.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatDriver.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatDriver.java Mon Sep 10 23:28:55 2012
@@ -34,109 +34,109 @@ import org.apache.hcatalog.common.HCatCo
 
 public class HCatDriver extends Driver {
 
-  @Override
-  public CommandProcessorResponse run(String command) {
+    @Override
+    public CommandProcessorResponse run(String command) {
 
-    CommandProcessorResponse cpr = null;
-    try {
-      cpr = super.run(command);
-    } catch (CommandNeedRetryException e) {
-      return new CommandProcessorResponse(-1, e.toString(), "");
-    }
+        CommandProcessorResponse cpr = null;
+        try {
+            cpr = super.run(command);
+        } catch (CommandNeedRetryException e) {
+            return new CommandProcessorResponse(-1, e.toString(), "");
+        }
 
-    SessionState ss = SessionState.get();
+        SessionState ss = SessionState.get();
 
-    if (cpr.getResponseCode() == 0){
-      // Only attempt to do this, if cmd was successful.
-      int rc = setFSPermsNGrp(ss);
-      cpr = new CommandProcessorResponse(rc);
-    }
-    // reset conf vars
-    ss.getConf().set(HCatConstants.HCAT_CREATE_DB_NAME, "");
-    ss.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, "");
+        if (cpr.getResponseCode() == 0) {
+            // Only attempt to do this, if cmd was successful.
+            int rc = setFSPermsNGrp(ss);
+            cpr = new CommandProcessorResponse(rc);
+        }
+        // reset conf vars
+        ss.getConf().set(HCatConstants.HCAT_CREATE_DB_NAME, "");
+        ss.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, "");
 
-    return cpr;
-  }
+        return cpr;
+    }
 
-  private int setFSPermsNGrp(SessionState ss) {
+    private int setFSPermsNGrp(SessionState ss) {
 
-    Configuration conf =ss.getConf();
+        Configuration conf = ss.getConf();
 
-    String tblName = conf.get(HCatConstants.HCAT_CREATE_TBL_NAME,"");
-    if (tblName.isEmpty()) {
-      tblName = conf.get("import.destination.table", "");
-      conf.set("import.destination.table", "");
-    }
-    String dbName = conf.get(HCatConstants.HCAT_CREATE_DB_NAME, "");
-    String grp = conf.get(HCatConstants.HCAT_GROUP,null);
-    String permsStr = conf.get(HCatConstants.HCAT_PERMS,null);
-
-    if(tblName.isEmpty() && dbName.isEmpty()){
-      // it wasn't create db/table
-      return 0;
-    }
+        String tblName = conf.get(HCatConstants.HCAT_CREATE_TBL_NAME, "");
+        if (tblName.isEmpty()) {
+            tblName = conf.get("import.destination.table", "");
+            conf.set("import.destination.table", "");
+        }
+        String dbName = conf.get(HCatConstants.HCAT_CREATE_DB_NAME, "");
+        String grp = conf.get(HCatConstants.HCAT_GROUP, null);
+        String permsStr = conf.get(HCatConstants.HCAT_PERMS, null);
+
+        if (tblName.isEmpty() && dbName.isEmpty()) {
+            // it wasn't create db/table
+            return 0;
+        }
 
-    if(null == grp && null == permsStr) {
-      // there were no grp and perms to begin with.
-      return 0;
-    }
+        if (null == grp && null == permsStr) {
+            // there were no grp and perms to begin with.
+            return 0;
+        }
 
-    FsPermission perms = FsPermission.valueOf(permsStr);
+        FsPermission perms = FsPermission.valueOf(permsStr);
 
-    if(!tblName.isEmpty()){
-      Hive db = null;
-      try{
-        db = Hive.get();
-        Table tbl =  db.getTable(tblName);
-        Path tblPath = tbl.getPath();
-
-        FileSystem fs = tblPath.getFileSystem(conf);
-        if(null != perms){
-          fs.setPermission(tblPath, perms);
-        }
-        if(null != grp){
-          fs.setOwner(tblPath, null, grp);
-        }
-        return 0;
-
-      } catch (Exception e){
-          ss.err.println(String.format("Failed to set permissions/groups on TABLE: <%s> %s",tblName,e.getMessage()));
-          try {  // We need to drop the table.
-            if(null != db){ db.dropTable(tblName); }
-          } catch (HiveException he) {
-            ss.err.println(String.format("Failed to drop TABLE <%s> after failing to set permissions/groups on it. %s",tblName,e.getMessage()));
-          }
-          return 1;
-      }
-    }
-    else{
-      // looks like a db operation
-      if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)){
-        // We dont set perms or groups for default dir.
-        return 0;
-      }
-      else{
-        try{
-          Hive db = Hive.get();
-          Path dbPath = new Warehouse(conf).getDatabasePath(db.getDatabase(dbName));
-          FileSystem fs = dbPath.getFileSystem(conf);
-          if(perms != null){
-            fs.setPermission(dbPath, perms);
-          }
-          if(null != grp){
-            fs.setOwner(dbPath, null, grp);
-          }
-          return 0;
-        } catch (Exception e){
-          ss.err.println(String.format("Failed to set permissions and/or group on DB: <%s> %s", dbName, e.getMessage()));
-          try {
-            Hive.get().dropDatabase(dbName);
-          } catch (Exception e1) {
-            ss.err.println(String.format("Failed to drop DB <%s> after failing to set permissions/group on it. %s", dbName, e1.getMessage()));
-          }
-          return 1;
+        if (!tblName.isEmpty()) {
+            Hive db = null;
+            try {
+                db = Hive.get();
+                Table tbl = db.getTable(tblName);
+                Path tblPath = tbl.getPath();
+
+                FileSystem fs = tblPath.getFileSystem(conf);
+                if (null != perms) {
+                    fs.setPermission(tblPath, perms);
+                }
+                if (null != grp) {
+                    fs.setOwner(tblPath, null, grp);
+                }
+                return 0;
+
+            } catch (Exception e) {
+                ss.err.println(String.format("Failed to set permissions/groups on TABLE: <%s> %s", tblName, e.getMessage()));
+                try {  // We need to drop the table.
+                    if (null != db) {
+                        db.dropTable(tblName);
+                    }
+                } catch (HiveException he) {
+                    ss.err.println(String.format("Failed to drop TABLE <%s> after failing to set permissions/groups on it. %s", tblName, e.getMessage()));
+                }
+                return 1;
+            }
+        } else {
+            // looks like a db operation
+            if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+                // We dont set perms or groups for default dir.
+                return 0;
+            } else {
+                try {
+                    Hive db = Hive.get();
+                    Path dbPath = new Warehouse(conf).getDatabasePath(db.getDatabase(dbName));
+                    FileSystem fs = dbPath.getFileSystem(conf);
+                    if (perms != null) {
+                        fs.setPermission(dbPath, perms);
+                    }
+                    if (null != grp) {
+                        fs.setOwner(dbPath, null, grp);
+                    }
+                    return 0;
+                } catch (Exception e) {
+                    ss.err.println(String.format("Failed to set permissions and/or group on DB: <%s> %s", dbName, e.getMessage()));
+                    try {
+                        Hive.get().dropDatabase(dbName);
+                    } catch (Exception e1) {
+                        ss.err.println(String.format("Failed to drop DB <%s> after failing to set permissions/group on it. %s", dbName, e1.getMessage()));
+                    }
+                    return 1;
+                }
+            }
         }
-      }
     }
-  }
 }
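
[Editor's note, for reference: the group/permission propagation in setFSPermsNGrp() above reduces to two FileSystem calls on the directory of the newly created table or database. A minimal sketch of that step, assuming hadoop-common on the classpath and a reachable file system; the path and group below are placeholders, not values from the commit:]

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class ApplyPermsSketch {
        // Mirrors the core of HCatDriver.setFSPermsNGrp(): apply perms and group to a directory.
        static void apply(Configuration conf, Path dir, FsPermission perms, String group) throws Exception {
            FileSystem fs = dir.getFileSystem(conf);
            if (perms != null) {
                fs.setPermission(dir, perms);  // chmod on the table/db directory
            }
            if (group != null) {
                fs.setOwner(dir, null, group); // chgrp; owner is left unchanged
            }
        }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Placeholder path and group for illustration only.
            apply(conf, new Path("/tmp/hcat_demo_table"), FsPermission.valueOf("drwxr-x---"), "hadoop");
        }
    }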

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java Mon Sep 10 23:28:55 2012
@@ -34,62 +34,62 @@ import org.apache.hadoop.hive.ql.plan.DD
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hcatalog.common.HCatConstants;
 
-final class CreateDatabaseHook  extends HCatSemanticAnalyzerBase {
+final class CreateDatabaseHook extends HCatSemanticAnalyzerBase {
 
-  String databaseName;
+    String databaseName;
 
-  @Override
-  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
-  throws SemanticException {
-
-    Hive db;
-    try {
-      db = context.getHive();
-    } catch (HiveException e) {
-      throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
-    }
+    @Override
+    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+        throws SemanticException {
 
-    // Analyze and create tbl properties object
-    int numCh = ast.getChildCount();
+        Hive db;
+        try {
+            db = context.getHive();
+        } catch (HiveException e) {
+            throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
+        }
 
-    databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode)ast.getChild(0));
+        // Analyze and create tbl properties object
+        int numCh = ast.getChildCount();
 
-    for (int num = 1; num < numCh; num++) {
-      ASTNode child = (ASTNode) ast.getChild(num);
+        databaseName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
 
-      switch (child.getToken().getType()) {
+        for (int num = 1; num < numCh; num++) {
+            ASTNode child = (ASTNode) ast.getChild(num);
 
-      case HiveParser.TOK_IFNOTEXISTS:
-        try {
-          List<String> dbs = db.getDatabasesByPattern(databaseName);
-          if (dbs != null && dbs.size() > 0) { // db exists
-            return ast;
-          }
-        } catch (HiveException e) {
-          throw new SemanticException(e);
+            switch (child.getToken().getType()) {
+
+            case HiveParser.TOK_IFNOTEXISTS:
+                try {
+                    List<String> dbs = db.getDatabasesByPattern(databaseName);
+                    if (dbs != null && dbs.size() > 0) { // db exists
+                        return ast;
+                    }
+                } catch (HiveException e) {
+                    throw new SemanticException(e);
+                }
+                break;
+            }
         }
-        break;
-      }
+
+        return ast;
     }
 
-    return ast;
-  }
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+                            List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+        context.getConf().set(HCatConstants.HCAT_CREATE_DB_NAME, databaseName);
+        super.postAnalyze(context, rootTasks);
+    }
 
-  @Override
-  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-    context.getConf().set(HCatConstants.HCAT_CREATE_DB_NAME, databaseName);
-    super.postAnalyze(context, rootTasks);
-  }
-  
-  @Override
-  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
-      Hive hive, DDLWork work) throws HiveException {
-    CreateDatabaseDesc createDb = work.getCreateDatabaseDesc();
-    if (createDb != null) {
-      Database db = new Database(createDb.getName(), createDb.getComment(), 
-          createDb.getLocationUri(), createDb.getDatabaseProperties());
-      authorize(db, Privilege.CREATE);
+    @Override
+    protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
+                                    Hive hive, DDLWork work) throws HiveException {
+        CreateDatabaseDesc createDb = work.getCreateDatabaseDesc();
+        if (createDb != null) {
+            Database db = new Database(createDb.getName(), createDb.getComment(),
+                createDb.getLocationUri(), createDb.getDatabaseProperties());
+            authorize(db, Privilege.CREATE);
+        }
     }
-  }
 }

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Mon Sep 10 23:28:55 2012
@@ -48,18 +48,18 @@ import org.apache.hcatalog.mapreduce.HCa
 final class CreateTableHook extends HCatSemanticAnalyzerBase {
 
     private String tableName;
-    
+
     @Override
     public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
-            ASTNode ast) throws SemanticException {
+                              ASTNode ast) throws SemanticException {
 
         Hive db;
         try {
             db = context.getHive();
         } catch (HiveException e) {
             throw new SemanticException(
-                    "Couldn't get Hive DB instance in semantic analysis phase.",
-                    e);
+                "Couldn't get Hive DB instance in semantic analysis phase.",
+                e);
         }
 
         // Analyze and create tbl properties object
@@ -67,7 +67,7 @@ final class CreateTableHook extends HCat
 
         String inputFormat = null, outputFormat = null;
         tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast
-                .getChild(0));
+            .getChild(0));
         boolean likeTable = false;
 
         for (int num = 1; num < numCh; num++) {
@@ -75,89 +75,89 @@ final class CreateTableHook extends HCat
 
             switch (child.getToken().getType()) {
 
-                case HiveParser.TOK_QUERY: // CTAS
-                    throw new SemanticException(
-                            "Operation not supported. Create table as " +
-                            "Select is not a valid operation.");
-
-                case HiveParser.TOK_TABLEBUCKETS:
-                    break;
-
-                case HiveParser.TOK_TBLSEQUENCEFILE:
-                    inputFormat = HCatConstants.SEQUENCEFILE_INPUT;
-                    outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT;
-                    break;
-
-                case HiveParser.TOK_TBLTEXTFILE:
-                    inputFormat      = org.apache.hadoop.mapred.TextInputFormat.class.getName();
-                    outputFormat     = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName();
-
-                    break;
-
-                case HiveParser.TOK_LIKETABLE:
-                    likeTable = true;
-                    break;
-
-                case HiveParser.TOK_IFNOTEXISTS:
-                    try {
-                        List<String> tables = db.getTablesByPattern(tableName);
-                        if (tables != null && tables.size() > 0) { // table
-                                                                   // exists
-                            return ast;
-                        }
-                    } catch (HiveException e) {
-                        throw new SemanticException(e);
+            case HiveParser.TOK_QUERY: // CTAS
+                throw new SemanticException(
+                    "Operation not supported. Create table as " +
+                        "Select is not a valid operation.");
+
+            case HiveParser.TOK_TABLEBUCKETS:
+                break;
+
+            case HiveParser.TOK_TBLSEQUENCEFILE:
+                inputFormat = HCatConstants.SEQUENCEFILE_INPUT;
+                outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT;
+                break;
+
+            case HiveParser.TOK_TBLTEXTFILE:
+                inputFormat = org.apache.hadoop.mapred.TextInputFormat.class.getName();
+                outputFormat = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName();
+
+                break;
+
+            case HiveParser.TOK_LIKETABLE:
+                likeTable = true;
+                break;
+
+            case HiveParser.TOK_IFNOTEXISTS:
+                try {
+                    List<String> tables = db.getTablesByPattern(tableName);
+                    if (tables != null && tables.size() > 0) { // table
+                        // exists
+                        return ast;
                     }
-                    break;
-
-                case HiveParser.TOK_TABLEPARTCOLS:
-                    List<FieldSchema> partCols = BaseSemanticAnalyzer
-                            .getColumns((ASTNode) child.getChild(0), false);
-                    for (FieldSchema fs : partCols) {
-                        if (!fs.getType().equalsIgnoreCase("string")) {
-                            throw new SemanticException(
-                                    "Operation not supported. HCatalog only " +
-                                    "supports partition columns of type string. "
-                                            + "For column: "
-                                            + fs.getName()
-                                            + " Found type: " + fs.getType());
-                        }
+                } catch (HiveException e) {
+                    throw new SemanticException(e);
+                }
+                break;
+
+            case HiveParser.TOK_TABLEPARTCOLS:
+                List<FieldSchema> partCols = BaseSemanticAnalyzer
+                    .getColumns((ASTNode) child.getChild(0), false);
+                for (FieldSchema fs : partCols) {
+                    if (!fs.getType().equalsIgnoreCase("string")) {
+                        throw new SemanticException(
+                            "Operation not supported. HCatalog only " +
+                                "supports partition columns of type string. "
+                                + "For column: "
+                                + fs.getName()
+                                + " Found type: " + fs.getType());
                     }
-                    break;
+                }
+                break;
 
-                case HiveParser.TOK_STORAGEHANDLER:
-                    String storageHandler = BaseSemanticAnalyzer
-                            .unescapeSQLString(child.getChild(0).getText());
-                    if (org.apache.commons.lang.StringUtils
-                            .isNotEmpty(storageHandler)) {
-                        return ast;
-                    }
+            case HiveParser.TOK_STORAGEHANDLER:
+                String storageHandler = BaseSemanticAnalyzer
+                    .unescapeSQLString(child.getChild(0).getText());
+                if (org.apache.commons.lang.StringUtils
+                    .isNotEmpty(storageHandler)) {
+                    return ast;
+                }
 
-                    break;
+                break;
 
-                case HiveParser.TOK_TABLEFILEFORMAT:
-                    if (child.getChildCount() < 2) {
-                        throw new SemanticException(
-                                "Incomplete specification of File Format. " +
-                                "You must provide InputFormat, OutputFormat.");
-                    }
-                    inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
-                            .getChild(0).getText());
-                    outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
-                            .getChild(1).getText());
-                    break;
-
-                case HiveParser.TOK_TBLRCFILE:
-                    inputFormat = RCFileInputFormat.class.getName();
-                    outputFormat = RCFileOutputFormat.class.getName();
-                    break;
+            case HiveParser.TOK_TABLEFILEFORMAT:
+                if (child.getChildCount() < 2) {
+                    throw new SemanticException(
+                        "Incomplete specification of File Format. " +
+                            "You must provide InputFormat, OutputFormat.");
+                }
+                inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
+                    .getChild(0).getText());
+                outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
+                    .getChild(1).getText());
+                break;
+
+            case HiveParser.TOK_TBLRCFILE:
+                inputFormat = RCFileInputFormat.class.getName();
+                outputFormat = RCFileOutputFormat.class.getName();
+                break;
 
             }
         }
-        
+
         if (!likeTable && (inputFormat == null || outputFormat == null)) {
             throw new SemanticException(
-                    "STORED AS specification is either incomplete or incorrect.");
+                "STORED AS specification is either incomplete or incorrect.");
         }
 
 
@@ -166,8 +166,8 @@ final class CreateTableHook extends HCat
 
     @Override
     public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-            List<Task<? extends Serializable>> rootTasks)
-            throws SemanticException {
+                            List<Task<? extends Serializable>> rootTasks)
+        throws SemanticException {
 
         if (rootTasks.size() == 0) {
             // There will be no DDL task created in case if its CREATE TABLE IF
@@ -175,12 +175,12 @@ final class CreateTableHook extends HCat
             return;
         }
         CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1))
-                .getWork().getCreateTblDesc();
+            .getWork().getCreateTblDesc();
         if (desc == null) {
-          // Desc will be null if its CREATE TABLE LIKE. Desc will be
-          // contained in CreateTableLikeDesc. Currently, HCat disallows CTLT in
-          // pre-hook. So, desc can never be null.
-          return;
+            // Desc will be null if its CREATE TABLE LIKE. Desc will be
+            // contained in CreateTableLikeDesc. Currently, HCat disallows CTLT in
+            // pre-hook. So, desc can never be null.
+            return;
         }
         Map<String, String> tblProps = desc.getTblProps();
         if (tblProps == null) {
@@ -196,11 +196,11 @@ final class CreateTableHook extends HCat
         } else {
             try {
                 HCatStorageHandler storageHandlerInst = HCatUtil
-                        .getStorageHandler(context.getConf(),
-                                                     desc.getStorageHandler(),
-                                                     desc.getSerName(),
-                                                     desc.getInputFormat(),
-                                                     desc.getOutputFormat());
+                    .getStorageHandler(context.getConf(),
+                        desc.getStorageHandler(),
+                        desc.getSerName(),
+                        desc.getInputFormat(),
+                        desc.getOutputFormat());
                 //Authorization checks are performed by the storageHandler.getAuthorizationProvider(), if  
                 //StorageDelegationAuthorizationProvider is used.
             } catch (IOException e) {
@@ -209,33 +209,33 @@ final class CreateTableHook extends HCat
         }
 
         if (desc != null) {
-          try {
-            Table table = context.getHive().newTable(desc.getTableName());
-            if (desc.getLocation() != null) {
-              table.setDataLocation(new Path(desc.getLocation()).toUri());
-            }
-            if (desc.getStorageHandler() != null) {
-              table.setProperty(
-                org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
-                desc.getStorageHandler());
-            }
-            for (Map.Entry<String, String> prop : tblProps.entrySet()) {
-              table.setProperty(prop.getKey(), prop.getValue());
-            }
-            for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
-              table.setSerdeParam(prop.getKey(), prop.getValue());
-            }
-            //TODO: set other Table properties as needed
-  
-            //authorize against the table operation so that location permissions can be checked if any
-            
-            if (HiveConf.getBoolVar(context.getConf(),
-                HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
-              authorize(table, Privilege.CREATE);
+            try {
+                Table table = context.getHive().newTable(desc.getTableName());
+                if (desc.getLocation() != null) {
+                    table.setDataLocation(new Path(desc.getLocation()).toUri());
+                }
+                if (desc.getStorageHandler() != null) {
+                    table.setProperty(
+                        org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE,
+                        desc.getStorageHandler());
+                }
+                for (Map.Entry<String, String> prop : tblProps.entrySet()) {
+                    table.setProperty(prop.getKey(), prop.getValue());
+                }
+                for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
+                    table.setSerdeParam(prop.getKey(), prop.getValue());
+                }
+                //TODO: set other Table properties as needed
+
+                //authorize against the table operation so that location permissions can be checked if any
+
+                if (HiveConf.getBoolVar(context.getConf(),
+                    HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+                    authorize(table, Privilege.CREATE);
+                }
+            } catch (HiveException ex) {
+                throw new SemanticException(ex);
             }
-          } catch (HiveException ex) {
-            throw new SemanticException(ex);
-          }
         }
 
         desc.setTblProps(tblProps);

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Mon Sep 10 23:28:55 2012
@@ -50,326 +50,325 @@ import org.apache.hcatalog.common.HCatEx
 
 public class HCatSemanticAnalyzer extends HCatSemanticAnalyzerBase {
 
-  private AbstractSemanticAnalyzerHook hook;
-  private ASTNode ast;
+    private AbstractSemanticAnalyzerHook hook;
+    private ASTNode ast;
 
 
+    @Override
+    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
+        throws SemanticException {
+
+        this.ast = ast;
+        switch (ast.getToken().getType()) {
+
+        // HCat wants to intercept following tokens and special-handle them.
+        case HiveParser.TOK_CREATETABLE:
+            hook = new CreateTableHook();
+            return hook.preAnalyze(context, ast);
+
+        case HiveParser.TOK_CREATEDATABASE:
+            hook = new CreateDatabaseHook();
+            return hook.preAnalyze(context, ast);
+
+        case HiveParser.TOK_ALTERTABLE_PARTITION:
+            if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
+                return ast;
+            } else if (((ASTNode) ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES) {
+                // unsupported
+                throw new SemanticException("Operation not supported.");
+            } else {
+                return ast;
+            }
+
+            // HCat will allow these operations to be performed.
+            // Database DDL
+        case HiveParser.TOK_SHOWDATABASES:
+        case HiveParser.TOK_DROPDATABASE:
+        case HiveParser.TOK_SWITCHDATABASE:
+        case HiveParser.TOK_DESCDATABASE:
+        case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+
+            // Index DDL
+        case HiveParser.TOK_ALTERINDEX_PROPERTIES:
+        case HiveParser.TOK_CREATEINDEX:
+        case HiveParser.TOK_DROPINDEX:
+        case HiveParser.TOK_SHOWINDEXES:
+
+            // View DDL
+            // "alter view add partition" does not work because of the nature of implementation
+            // of the DDL in hive. Hive will internally invoke another Driver on the select statement,
+            // and HCat does not let "select" statement through. I cannot find a way to get around it
+            // without modifying hive code. So just leave it unsupported.
+            //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+        case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+        case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+        case HiveParser.TOK_ALTERVIEW_RENAME:
+        case HiveParser.TOK_CREATEVIEW:
+        case HiveParser.TOK_DROPVIEW:
+
+            // Authorization DDL
+        case HiveParser.TOK_CREATEROLE:
+        case HiveParser.TOK_DROPROLE:
+        case HiveParser.TOK_GRANT_ROLE:
+        case HiveParser.TOK_GRANT_WITH_OPTION:
+        case HiveParser.TOK_GRANT:
+        case HiveParser.TOK_REVOKE_ROLE:
+        case HiveParser.TOK_REVOKE:
+        case HiveParser.TOK_SHOW_GRANT:
+        case HiveParser.TOK_SHOW_ROLE_GRANT:
+
+            // Misc DDL
+        case HiveParser.TOK_LOCKTABLE:
+        case HiveParser.TOK_UNLOCKTABLE:
+        case HiveParser.TOK_SHOWLOCKS:
+        case HiveParser.TOK_DESCFUNCTION:
+        case HiveParser.TOK_SHOWFUNCTIONS:
+        case HiveParser.TOK_EXPLAIN:
+
+            // Table DDL
+        case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+        case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+        case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+        case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+        case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
+        case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+        case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+        case HiveParser.TOK_ALTERTABLE_RENAME:
+        case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+        case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+        case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+        case HiveParser.TOK_ALTERTABLE_TOUCH:
+        case HiveParser.TOK_DESCTABLE:
+        case HiveParser.TOK_DROPTABLE:
+        case HiveParser.TOK_SHOW_TABLESTATUS:
+        case HiveParser.TOK_SHOWPARTITIONS:
+        case HiveParser.TOK_SHOWTABLES:
+            return ast;
 
-  @Override
-  public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
-      throws SemanticException {
-
-      this.ast = ast;
-      switch (ast.getToken().getType()) {
-
-      // HCat wants to intercept following tokens and special-handle them.
-      case HiveParser.TOK_CREATETABLE:
-        hook = new CreateTableHook();
-        return hook.preAnalyze(context, ast);
-
-      case HiveParser.TOK_CREATEDATABASE:
-        hook = new CreateDatabaseHook();
-        return hook.preAnalyze(context, ast);
+        // In all other cases, throw an exception. Its a white-list of allowed operations.
+        default:
+            throw new SemanticException("Operation not supported.");
 
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-          if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
-            return ast;
-          } else if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES){
-              // unsupported
-              throw new SemanticException("Operation not supported.");
-          } else {
-              return ast;
-          }
-
-      // HCat will allow these operations to be performed.
-      // Database DDL
-      case HiveParser.TOK_SHOWDATABASES:
-      case HiveParser.TOK_DROPDATABASE:
-      case HiveParser.TOK_SWITCHDATABASE:
-      case HiveParser.TOK_DESCDATABASE:
-      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
-
-      // Index DDL
-      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
-      case HiveParser.TOK_CREATEINDEX:
-      case HiveParser.TOK_DROPINDEX:
-      case HiveParser.TOK_SHOWINDEXES:
-
-      // View DDL
-      // "alter view add partition" does not work because of the nature of implementation
-      // of the DDL in hive. Hive will internally invoke another Driver on the select statement,
-      // and HCat does not let "select" statement through. I cannot find a way to get around it
-      // without modifying hive code. So just leave it unsupported.
-      //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_RENAME:
-      case HiveParser.TOK_CREATEVIEW:
-      case HiveParser.TOK_DROPVIEW:
-
-      // Authorization DDL
-      case HiveParser.TOK_CREATEROLE:
-      case HiveParser.TOK_DROPROLE:
-      case HiveParser.TOK_GRANT_ROLE:
-      case HiveParser.TOK_GRANT_WITH_OPTION:
-      case HiveParser.TOK_GRANT:
-      case HiveParser.TOK_REVOKE_ROLE:
-      case HiveParser.TOK_REVOKE:
-      case HiveParser.TOK_SHOW_GRANT:
-      case HiveParser.TOK_SHOW_ROLE_GRANT:
-
-      // Misc DDL
-      case HiveParser.TOK_LOCKTABLE:
-      case HiveParser.TOK_UNLOCKTABLE:
-      case HiveParser.TOK_SHOWLOCKS:
-      case HiveParser.TOK_DESCFUNCTION:
-      case HiveParser.TOK_SHOWFUNCTIONS:
-      case HiveParser.TOK_EXPLAIN:
-
-      // Table DDL
-      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
-      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
-      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_TOUCH:
-      case HiveParser.TOK_DESCTABLE:
-      case HiveParser.TOK_DROPTABLE:
-      case HiveParser.TOK_SHOW_TABLESTATUS:
-      case HiveParser.TOK_SHOWPARTITIONS:
-      case HiveParser.TOK_SHOWTABLES:
-        return ast;
-
-      // In all other cases, throw an exception. Its a white-list of allowed operations.
-      default:
-        throw new SemanticException("Operation not supported.");
-
-      }
-  }
-
-  @Override
-  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-
-    try{
-
-      switch (ast.getToken().getType()) {
-
-      case HiveParser.TOK_CREATETABLE:
-      case HiveParser.TOK_CREATEDATABASE:
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-
-      // HCat will allow these operations to be performed.
-      // Database DDL
-      case HiveParser.TOK_SHOWDATABASES:
-      case HiveParser.TOK_DROPDATABASE:
-      case HiveParser.TOK_SWITCHDATABASE:
-      case HiveParser.TOK_DESCDATABASE:
-      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
-
-      // Index DDL
-      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
-      case HiveParser.TOK_CREATEINDEX:
-      case HiveParser.TOK_DROPINDEX:
-      case HiveParser.TOK_SHOWINDEXES:
-
-      // View DDL
-      //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
-      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
-      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
-      case HiveParser.TOK_ALTERVIEW_RENAME:
-      case HiveParser.TOK_CREATEVIEW:
-      case HiveParser.TOK_DROPVIEW:
-
-      // Authorization DDL
-      case HiveParser.TOK_CREATEROLE:
-      case HiveParser.TOK_DROPROLE:
-      case HiveParser.TOK_GRANT_ROLE:
-      case HiveParser.TOK_GRANT_WITH_OPTION:
-      case HiveParser.TOK_GRANT:
-      case HiveParser.TOK_REVOKE_ROLE:
-      case HiveParser.TOK_REVOKE:
-      case HiveParser.TOK_SHOW_GRANT:
-      case HiveParser.TOK_SHOW_ROLE_GRANT:
-
-      // Misc DDL
-      case HiveParser.TOK_LOCKTABLE:
-      case HiveParser.TOK_UNLOCKTABLE:
-      case HiveParser.TOK_SHOWLOCKS:
-      case HiveParser.TOK_DESCFUNCTION:
-      case HiveParser.TOK_SHOWFUNCTIONS:
-      case HiveParser.TOK_EXPLAIN:
-
-      // Table DDL
-      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
-      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
-      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
-      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
-      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_TOUCH:
-      case HiveParser.TOK_DESCTABLE:
-      case HiveParser.TOK_DROPTABLE:
-      case HiveParser.TOK_SHOW_TABLESTATUS:
-      case HiveParser.TOK_SHOWPARTITIONS:
-      case HiveParser.TOK_SHOWTABLES:
-        break;
-
-      default:
-        throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: "+ast.getToken());
-      }
-
-      authorizeDDL(context, rootTasks);
-
-    } catch(HCatException e){
-      throw new SemanticException(e);
-    } catch (HiveException e) {
-      throw new SemanticException(e);
+        }
     }
 
-    if(hook != null){
-      hook.postAnalyze(context, rootTasks);
-    }
-  }
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+                            List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+
+        try {
+
+            switch (ast.getToken().getType()) {
+
+            case HiveParser.TOK_CREATETABLE:
+            case HiveParser.TOK_CREATEDATABASE:
+            case HiveParser.TOK_ALTERTABLE_PARTITION:
+
+                // HCat will allow these operations to be performed.
+                // Database DDL
+            case HiveParser.TOK_SHOWDATABASES:
+            case HiveParser.TOK_DROPDATABASE:
+            case HiveParser.TOK_SWITCHDATABASE:
+            case HiveParser.TOK_DESCDATABASE:
+            case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+
+                // Index DDL
+            case HiveParser.TOK_ALTERINDEX_PROPERTIES:
+            case HiveParser.TOK_CREATEINDEX:
+            case HiveParser.TOK_DROPINDEX:
+            case HiveParser.TOK_SHOWINDEXES:
+
+                // View DDL
+                //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+            case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+            case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+            case HiveParser.TOK_ALTERVIEW_RENAME:
+            case HiveParser.TOK_CREATEVIEW:
+            case HiveParser.TOK_DROPVIEW:
+
+                // Authorization DDL
+            case HiveParser.TOK_CREATEROLE:
+            case HiveParser.TOK_DROPROLE:
+            case HiveParser.TOK_GRANT_ROLE:
+            case HiveParser.TOK_GRANT_WITH_OPTION:
+            case HiveParser.TOK_GRANT:
+            case HiveParser.TOK_REVOKE_ROLE:
+            case HiveParser.TOK_REVOKE:
+            case HiveParser.TOK_SHOW_GRANT:
+            case HiveParser.TOK_SHOW_ROLE_GRANT:
+
+                // Misc DDL
+            case HiveParser.TOK_LOCKTABLE:
+            case HiveParser.TOK_UNLOCKTABLE:
+            case HiveParser.TOK_SHOWLOCKS:
+            case HiveParser.TOK_DESCFUNCTION:
+            case HiveParser.TOK_SHOWFUNCTIONS:
+            case HiveParser.TOK_EXPLAIN:
+
+                // Table DDL
+            case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+            case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+            case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+            case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+            case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
+            case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+            case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+            case HiveParser.TOK_ALTERTABLE_RENAME:
+            case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+            case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+            case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+            case HiveParser.TOK_ALTERTABLE_TOUCH:
+            case HiveParser.TOK_DESCTABLE:
+            case HiveParser.TOK_DROPTABLE:
+            case HiveParser.TOK_SHOW_TABLESTATUS:
+            case HiveParser.TOK_SHOWPARTITIONS:
+            case HiveParser.TOK_SHOWTABLES:
+                break;
+
+            default:
+                throw new HCatException(ErrorType.ERROR_INTERNAL_EXCEPTION, "Unexpected token: " + ast.getToken());
+            }
+
+            authorizeDDL(context, rootTasks);
+
+        } catch (HCatException e) {
+            throw new SemanticException(e);
+        } catch (HiveException e) {
+            throw new SemanticException(e);
+        }
 
-  private String extractTableName (String compoundName) {
-    /* 
-     * the table name can potentially be a dot-format one with column names
-     * specified as part of the table name. e.g. a.b.c where b is a column in
-     * a and c is a field of the object/column b etc. For authorization 
-     * purposes, we should use only the first part of the dotted name format.
-     *
-     */
-
-   String [] words = compoundName.split("\\.");
-   return words[0];
-  }
-
-  @Override
-  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work)
-      throws HiveException {
-    // DB opereations, none of them are enforced by Hive right now.
-
-    ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
-    if (showDatabases != null) {
-      authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(),
-          HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
+        if (hook != null) {
+            hook.postAnalyze(context, rootTasks);
+        }
     }
 
-    DropDatabaseDesc dropDb = work.getDropDatabaseDesc();
-    if (dropDb != null) {
-      Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
-      authorize(db, Privilege.DROP);
-    }
+    private String extractTableName(String compoundName) {
+        /*
+        * The table name can potentially be a dotted name with column names
+        * specified as part of it, e.g. a.b.c where b is a column in
+        * a and c is a field of the object/column b. For authorization
+        * purposes, we should use only the first part of the dotted name.
+        *
+        */
+
+        String[] words = compoundName.split("\\.");
+        return words[0];
+    }
+
+    @Override
+    protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext cntxt, Hive hive, DDLWork work)
+        throws HiveException {
+        // DB operations, none of them are enforced by Hive right now.
+
+        ShowDatabasesDesc showDatabases = work.getShowDatabasesDesc();
+        if (showDatabases != null) {
+            authorize(HiveOperation.SHOWDATABASES.getInputRequiredPrivileges(),
+                HiveOperation.SHOWDATABASES.getOutputRequiredPrivileges());
+        }
 
-    DescDatabaseDesc descDb = work.getDescDatabaseDesc();
-    if (descDb != null) {
-      Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
-      authorize(db, Privilege.SELECT);
-    }
+        DropDatabaseDesc dropDb = work.getDropDatabaseDesc();
+        if (dropDb != null) {
+            Database db = cntxt.getHive().getDatabase(dropDb.getDatabaseName());
+            authorize(db, Privilege.DROP);
+        }
 
-    SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc();
-    if (switchDb != null) {
-      Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
-      authorize(db, Privilege.SELECT);
-    }
+        DescDatabaseDesc descDb = work.getDescDatabaseDesc();
+        if (descDb != null) {
+            Database db = cntxt.getHive().getDatabase(descDb.getDatabaseName());
+            authorize(db, Privilege.SELECT);
+        }
 
-    ShowTablesDesc showTables = work.getShowTblsDesc();
-    if (showTables != null) {
-      String dbName = showTables.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
-          : showTables.getDbName();
-      authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
-    }
+        SwitchDatabaseDesc switchDb = work.getSwitchDatabaseDesc();
+        if (switchDb != null) {
+            Database db = cntxt.getHive().getDatabase(switchDb.getDatabaseName());
+            authorize(db, Privilege.SELECT);
+        }
 
-    ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
-    if (showTableStatus != null) {
-      String dbName = showTableStatus.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
-          : showTableStatus.getDbName();
-      authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
-    }
+        ShowTablesDesc showTables = work.getShowTblsDesc();
+        if (showTables != null) {
+            String dbName = showTables.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+                : showTables.getDbName();
+            authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
+        }
 
-    // TODO: add alter database support in HCat
+        ShowTableStatusDesc showTableStatus = work.getShowTblStatusDesc();
+        if (showTableStatus != null) {
+            String dbName = showTableStatus.getDbName() == null ? cntxt.getHive().getCurrentDatabase()
+                : showTableStatus.getDbName();
+            authorize(cntxt.getHive().getDatabase(dbName), Privilege.SELECT);
+        }
 
-    // Table operations.
+        // TODO: add alter database support in HCat
 
-    DropTableDesc dropTable = work.getDropTblDesc();
-    if (dropTable != null) {
-      if (dropTable.getPartSpecs() == null) {
-        // drop table is already enforced by Hive. We only check for table level location even if the
-        // table is partitioned.
-      } else {
-        //this is actually a ALTER TABLE DROP PARITITION statement
-        for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
-          // partitions are not added as write entries in drop partitions in Hive
-          Table table = hive.getTable(hive.getCurrentDatabase(), dropTable.getTableName());
-          List<Partition> partitions = null;
-          try {
-            partitions = hive.getPartitionsByFilter(table, partSpec.toString());
-           } catch (Exception e) {
-            throw new HiveException(e);
-           }
-
-          for (Partition part : partitions) {
-            authorize(part, Privilege.DROP);
-          }
+        // Table operations.
+
+        DropTableDesc dropTable = work.getDropTblDesc();
+        if (dropTable != null) {
+            if (dropTable.getPartSpecs() == null) {
+                // drop table is already enforced by Hive. We only check for table level location even if the
+                // table is partitioned.
+            } else {
+                // this is actually an ALTER TABLE DROP PARTITION statement
+                for (PartitionSpec partSpec : dropTable.getPartSpecs()) {
+                    // partitions are not added as write entries in drop partitions in Hive
+                    Table table = hive.getTable(hive.getCurrentDatabase(), dropTable.getTableName());
+                    List<Partition> partitions = null;
+                    try {
+                        partitions = hive.getPartitionsByFilter(table, partSpec.toString());
+                    } catch (Exception e) {
+                        throw new HiveException(e);
+                    }
+
+                    for (Partition part : partitions) {
+                        authorize(part, Privilege.DROP);
+                    }
+                }
+            }
         }
-      }
-    }
 
-    AlterTableDesc alterTable = work.getAlterTblDesc();
-    if (alterTable != null) {
-      Table table = hive.getTable(hive.getCurrentDatabase(), alterTable.getOldName(), false);
-
-      Partition part = null;
-      if (alterTable.getPartSpec() != null) {
-        part = hive.getPartition(table, alterTable.getPartSpec(), false);
-      }
-
-      String newLocation = alterTable.getNewLocation();
-
-      /* Hcat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
-       * for the old table/partition location and the new location.
-       */
-      if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
-        if (part != null) {
-          authorize(part, Privilege.ALTER_DATA); // authorize for the old
-                                                 // location, and new location
-          part.setLocation(newLocation);
-          authorize(part, Privilege.ALTER_DATA);
-        } else {
-          authorize(table, Privilege.ALTER_DATA); // authorize for the old
-                                                  // location, and new location
-          table.getTTable().getSd().setLocation(newLocation);
-          authorize(table, Privilege.ALTER_DATA);
+        AlterTableDesc alterTable = work.getAlterTblDesc();
+        if (alterTable != null) {
+            Table table = hive.getTable(hive.getCurrentDatabase(), alterTable.getOldName(), false);
+
+            Partition part = null;
+            if (alterTable.getPartSpec() != null) {
+                part = hive.getPartition(table, alterTable.getPartSpec(), false);
+            }
+
+            String newLocation = alterTable.getNewLocation();
+
+            /* HCat requires ALTER_DATA privileges for ALTER TABLE LOCATION statements
+            * for the old table/partition location and the new location.
+            */
+            if (alterTable.getOp() == AlterTableDesc.AlterTableTypes.ALTERLOCATION) {
+                if (part != null) {
+                    authorize(part, Privilege.ALTER_DATA); // authorize for the old
+                    // location, and new location
+                    part.setLocation(newLocation);
+                    authorize(part, Privilege.ALTER_DATA);
+                } else {
+                    authorize(table, Privilege.ALTER_DATA); // authorize for the old
+                    // location, and new location
+                    table.getTTable().getSd().setLocation(newLocation);
+                    authorize(table, Privilege.ALTER_DATA);
+                }
+            }
+            //other alter operations are already supported by Hive
         }
-      }
-      //other alter operations are already supported by Hive
-    }
 
-    // we should be careful when authorizing table based on just the 
-    // table name. If columns have separate authorization domain, it 
-    // must be honored
-    DescTableDesc descTable = work.getDescTblDesc();
-    if (descTable != null) {
-      String tableName = extractTableName(descTable.getTableName());
-      authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
-    }
+        // we should be careful when authorizing table based on just the
+        // table name. If columns have separate authorization domain, it
+        // must be honored
+        DescTableDesc descTable = work.getDescTblDesc();
+        if (descTable != null) {
+            String tableName = extractTableName(descTable.getTableName());
+            authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
+        }
 
-    ShowPartitionsDesc showParts = work.getShowPartsDesc();
-    if (showParts != null) {
-      String tableName = extractTableName(showParts.getTabName());
-      authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
+        ShowPartitionsDesc showParts = work.getShowPartsDesc();
+        if (showParts != null) {
+            String tableName = extractTableName(showParts.getTabName());
+            authorizeTable(cntxt.getHive(), tableName, Privilege.SELECT);
+        }
     }
-  }
 }
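
For illustration only (not part of the commit above): the extractTableName() helper keeps just the first component of a possibly dotted name such as a.b.c, since only that part identifies the table for authorization. Below is a minimal standalone sketch of that behavior; the class name is hypothetical.

public class ExtractTableNameDemo {

    // Mirrors the dotted-name handling described above: split on "." and
    // keep only the first component, which names the table.
    static String extractTableName(String compoundName) {
        return compoundName.split("\\.")[0];
    }

    public static void main(String[] args) {
        System.out.println(extractTableName("a.b.c"));   // prints "a"
        System.out.println(extractTableName("mytable")); // prints "mytable"
    }
}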

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java?rev=1383152&r1=1383151&r2=1383152&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzerBase.java Mon Sep 10 23:28:55 2012
@@ -38,142 +38,141 @@ import org.apache.hadoop.hive.ql.securit
 import org.apache.hadoop.hive.ql.security.authorization.Privilege;
 import org.apache.hadoop.hive.ql.session.SessionState;
 
-/** 
+/**
  * Base class for HCatSemanticAnalyzer hooks.
  */
 public class HCatSemanticAnalyzerBase extends AbstractSemanticAnalyzerHook {
 
-  private HiveAuthorizationProvider authProvider;
-  
-  protected String getDbName(Hive hive, String dbName) {
-    return dbName == null ? hive.getCurrentDatabase() : dbName;
-  }
-  
-  public HiveAuthorizationProvider getAuthProvider() {
-    if (authProvider == null) {
-      authProvider = SessionState.get().getAuthorizer();
-    }
-    
-    return authProvider;
-  }
-
-  @Override
-  public void postAnalyze(HiveSemanticAnalyzerHookContext context,
-      List<Task<? extends Serializable>> rootTasks) throws SemanticException {
-    super.postAnalyze(context, rootTasks);
-    
-    //Authorize the operation.
-    authorizeDDL(context, rootTasks);
-  }
-  
-  /** 
-   * Checks for the given rootTasks, and calls authorizeDDLWork() for each DDLWork to 
-   * be authorized. The hooks should override this, or authorizeDDLWork to perform the 
-   * actual authorization.
-   */
-  /*
-   * Impl note: Hive provides authorization with it's own model, and calls the defined 
-   * HiveAuthorizationProvider from Driver.doAuthorization(). However, HCat has to 
-   * do additional calls to the auth provider to implement expected behavior for 
-   * StorageDelegationAuthorizationProvider. This means, that the defined auth provider 
-   * is called by both Hive and HCat. The following are missing from Hive's implementation,
-   * and when they are fixed in Hive, we can remove the HCat-specific auth checks.   
-   * 1. CREATE DATABASE/TABLE, ADD PARTITION statements does not call 
-   * HiveAuthorizationProvider.authorize() with the candidate objects, which means that
-   * we cannot do checks against defined LOCATION.
-   * 2. HiveOperation does not define sufficient Privileges for most of the operations, 
-   * especially database operations. 
-   * 3. For some of the operations, Hive SemanticAnalyzer does not add the changed 
-   * object as a WriteEntity or ReadEntity.
-   * 
-   * @see https://issues.apache.org/jira/browse/HCATALOG-244
-   * @see https://issues.apache.org/jira/browse/HCATALOG-245
-   */
-  protected void authorizeDDL(HiveSemanticAnalyzerHookContext context, 
-      List<Task<? extends Serializable>> rootTasks)  throws SemanticException {
-    
-    if (!HiveConf.getBoolVar(context.getConf(),
-        HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
-      return;
-    }
-    
-    Hive hive;
-    try {
-      hive = context.getHive();
-    
-      for (Task<? extends Serializable> task : rootTasks) {
-        if (task.getWork() instanceof DDLWork) {
-          DDLWork work = (DDLWork)task.getWork();
-          if (work != null) {
-            authorizeDDLWork(context, hive, work);
-          }
-        }
-      }
-    } catch (SemanticException ex) {
-      throw ex;
-    } catch (AuthorizationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new SemanticException(ex);
-    }
-  }
-  
-  /** 
-   * Authorized the given DDLWork. Does nothing by default. Override this 
-   * and delegate to the relevant method in HiveAuthorizationProvider obtained by 
-   * getAuthProvider().
-   */
-  protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
-      Hive hive, DDLWork work) throws HiveException {
-  }
-
-  protected void authorize(Privilege[] inputPrivs, Privilege[] outputPrivs)
-      throws AuthorizationException, SemanticException {
-    try {
-      getAuthProvider().authorize(inputPrivs, outputPrivs);
-    } catch (HiveException ex) {
-      throw new SemanticException(ex);
-    }
-  }
-  
-  protected void authorize(Database db, Privilege priv) 
-      throws AuthorizationException, SemanticException {
-    try {
-      getAuthProvider().authorize(db, null, new Privilege[] {priv});
-    } catch (HiveException ex) {
-      throw new SemanticException(ex);
-    }
-  }
-  
-  protected void authorizeTable(Hive hive, String tableName, Privilege priv) 
-      throws AuthorizationException, HiveException {
-    Table table;
-    try{
-      table = hive.getTable(tableName);
-    }
-    catch(InvalidTableException ite){
-      // Table itself doesn't exist in metastore, nothing to validate.
-      return;
-    }
-    
-    authorize(table, priv);
-  }
-  
-  protected void authorize(Table table, Privilege priv) 
-      throws AuthorizationException, SemanticException {
-    try {
-      getAuthProvider().authorize(table, new Privilege[] {priv}, null);
-    } catch (HiveException ex) {
-      throw new SemanticException(ex);
-    }
-  }
-  
-  protected void authorize(Partition part, Privilege priv) 
-      throws AuthorizationException, SemanticException {
-    try {
-      getAuthProvider().authorize(part, new Privilege[] {priv}, null);
-    } catch (HiveException ex) {
-      throw new SemanticException(ex);
+    private HiveAuthorizationProvider authProvider;
+
+    protected String getDbName(Hive hive, String dbName) {
+        return dbName == null ? hive.getCurrentDatabase() : dbName;
+    }
+
+    public HiveAuthorizationProvider getAuthProvider() {
+        if (authProvider == null) {
+            authProvider = SessionState.get().getAuthorizer();
+        }
+
+        return authProvider;
+    }
+
+    @Override
+    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
+                            List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+        super.postAnalyze(context, rootTasks);
+
+        //Authorize the operation.
+        authorizeDDL(context, rootTasks);
+    }
+
+    /**
+     * Checks the given rootTasks and calls authorizeDDLWork() for each DDLWork to
+     * be authorized. Hooks should override this method, or authorizeDDLWork(), to
+     * perform the actual authorization.
+     */
+    /*
+    * Impl note: Hive provides authorization with its own model, and calls the defined
+    * HiveAuthorizationProvider from Driver.doAuthorization(). However, HCat has to
+    * do additional calls to the auth provider to implement expected behavior for
+    * StorageDelegationAuthorizationProvider. This means, that the defined auth provider
+    * is called by both Hive and HCat. The following are missing from Hive's implementation,
+    * and when they are fixed in Hive, we can remove the HCat-specific auth checks.
+    * 1. CREATE DATABASE/TABLE, ADD PARTITION statements do not call
+    * HiveAuthorizationProvider.authorize() with the candidate objects, which means that
+    * we cannot do checks against defined LOCATION.
+    * 2. HiveOperation does not define sufficient Privileges for most of the operations,
+    * especially database operations.
+    * 3. For some of the operations, Hive SemanticAnalyzer does not add the changed
+    * object as a WriteEntity or ReadEntity.
+    *
+    * @see https://issues.apache.org/jira/browse/HCATALOG-244
+    * @see https://issues.apache.org/jira/browse/HCATALOG-245
+    */
+    protected void authorizeDDL(HiveSemanticAnalyzerHookContext context,
+                                List<Task<? extends Serializable>> rootTasks) throws SemanticException {
+
+        if (!HiveConf.getBoolVar(context.getConf(),
+            HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED)) {
+            return;
+        }
+
+        Hive hive;
+        try {
+            hive = context.getHive();
+
+            for (Task<? extends Serializable> task : rootTasks) {
+                if (task.getWork() instanceof DDLWork) {
+                    DDLWork work = (DDLWork) task.getWork();
+                    if (work != null) {
+                        authorizeDDLWork(context, hive, work);
+                    }
+                }
+            }
+        } catch (SemanticException ex) {
+            throw ex;
+        } catch (AuthorizationException ex) {
+            throw ex;
+        } catch (Exception ex) {
+            throw new SemanticException(ex);
+        }
+    }
+
+    /**
+     * Authorizes the given DDLWork. Does nothing by default. Override this
+     * and delegate to the relevant method in HiveAuthorizationProvider obtained by
+     * getAuthProvider().
+     */
+    protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
+                                    Hive hive, DDLWork work) throws HiveException {
+    }
+
+    protected void authorize(Privilege[] inputPrivs, Privilege[] outputPrivs)
+        throws AuthorizationException, SemanticException {
+        try {
+            getAuthProvider().authorize(inputPrivs, outputPrivs);
+        } catch (HiveException ex) {
+            throw new SemanticException(ex);
+        }
+    }
+
+    protected void authorize(Database db, Privilege priv)
+        throws AuthorizationException, SemanticException {
+        try {
+            getAuthProvider().authorize(db, null, new Privilege[]{priv});
+        } catch (HiveException ex) {
+            throw new SemanticException(ex);
+        }
+    }
+
+    protected void authorizeTable(Hive hive, String tableName, Privilege priv)
+        throws AuthorizationException, HiveException {
+        Table table;
+        try {
+            table = hive.getTable(tableName);
+        } catch (InvalidTableException ite) {
+            // Table itself doesn't exist in metastore, nothing to validate.
+            return;
+        }
+
+        authorize(table, priv);
+    }
+
+    protected void authorize(Table table, Privilege priv)
+        throws AuthorizationException, SemanticException {
+        try {
+            getAuthProvider().authorize(table, new Privilege[]{priv}, null);
+        } catch (HiveException ex) {
+            throw new SemanticException(ex);
+        }
+    }
+
+    protected void authorize(Partition part, Privilege priv)
+        throws AuthorizationException, SemanticException {
+        try {
+            getAuthProvider().authorize(part, new Privilege[]{priv}, null);
+        } catch (HiveException ex) {
+            throw new SemanticException(ex);
+        }
     }
-  }
 }
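
For illustration only (not part of the commit above): HCatSemanticAnalyzerBase expects subclasses to override authorizeDDLWork() and delegate to its authorize helpers, which consult the HiveAuthorizationProvider returned by getAuthProvider(). The following is a minimal sketch of such a subclass; the package and class name are hypothetical, and only DROP TABLE is handled here.

package org.example.hcat;

import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.plan.DDLWork;
import org.apache.hadoop.hive.ql.plan.DropTableDesc;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzerBase;

// Hypothetical hook: authorizes DROP TABLE against the configured auth provider.
public class DropTableAuthHook extends HCatSemanticAnalyzerBase {

    @Override
    protected void authorizeDDLWork(HiveSemanticAnalyzerHookContext context,
                                    Hive hive, DDLWork work) throws HiveException {
        // Only DROP TABLE is handled in this sketch; other DDL passes through.
        DropTableDesc dropTable = work.getDropTblDesc();
        if (dropTable != null) {
            // Reuse the base class helper, which consults the configured
            // HiveAuthorizationProvider and skips tables missing from the metastore.
            authorizeTable(hive, dropTable.getTableName(), Privilege.DROP);
        }
    }
}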


