hive-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gunt...@apache.org
Subject svn commit: r1622108 [26/27] - in /hive/branches/tez: ./ accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/predicate/ beeline/src/java/org/apache/hive/beeline/ beeline/src/test/org/apache/hive/beeline/ bin/ bin/ext/ checkstyle/ common/src/java/...
Date Tue, 02 Sep 2014 19:57:07 GMT
Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/PasswdAuthenticationProvider.java Tue Sep  2 19:56:56 2014
@@ -20,18 +20,20 @@ package org.apache.hive.service.auth;
 import javax.security.sasl.AuthenticationException;
 
 public interface PasswdAuthenticationProvider {
+
   /**
    * The Authenticate method is called by the HiveServer2 authentication layer
    * to authenticate users for their requests.
    * If a user is to be granted, return nothing/throw nothing.
    * When a user is to be disallowed, throw an appropriate {@link AuthenticationException}.
-   *
+   * <p/>
    * For an example implementation, see {@link LdapAuthenticationProviderImpl}.
    *
-   * @param user - The username received over the connection request
-   * @param password - The password received over the connection request
-   * @throws AuthenticationException - When a user is found to be
-   * invalid by the implementation
+   * @param user     The username received over the connection request
+   * @param password The password received over the connection request
+   *
+   * @throws AuthenticationException When a user is found to be
+   *                                 invalid by the implementation
    */
   void Authenticate(String user, String password) throws AuthenticationException;
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslHelper.java Tue Sep  2 19:56:56 2014
@@ -18,8 +18,8 @@
 package org.apache.hive.service.auth;
 
 import java.io.IOException;
+import java.security.Security;
 import java.util.HashMap;
-
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.callback.NameCallback;
@@ -30,10 +30,8 @@ import javax.security.sasl.Authenticatio
 import javax.security.sasl.AuthorizeCallback;
 import javax.security.sasl.SaslException;
 
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
-import org.apache.hive.service.cli.thrift.TCLIService;
+import org.apache.hive.service.auth.PlainSaslServer.SaslPlainProvider;
 import org.apache.hive.service.cli.thrift.TCLIService.Iface;
 import org.apache.hive.service.cli.thrift.ThriftCLIService;
 import org.apache.thrift.TProcessor;
@@ -43,78 +41,108 @@ import org.apache.thrift.transport.TSasl
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportFactory;
 
-public class PlainSaslHelper {
+public final class PlainSaslHelper {
+
+  public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) {
+    return new SQLPlainProcessorFactory(service);
+  }
+
+  // Register Plain SASL server provider
+  static {
+    Security.addProvider(new SaslPlainProvider());
+  }
+
+  public static TTransportFactory getPlainTransportFactory(String authTypeStr)
+    throws LoginException {
+    TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
+    try {
+      saslFactory.addServerDefinition("PLAIN", authTypeStr, null, new HashMap<String, String>(),
+        new PlainServerCallbackHandler(authTypeStr));
+    } catch (AuthenticationException e) {
+      throw new LoginException("Error setting callback handler" + e);
+    }
+    return saslFactory;
+  }
+
+  public static TTransport getPlainTransport(String username, String password,
+    TTransport underlyingTransport) throws SaslException {
+    return new TSaslClientTransport("PLAIN", null, null, null, new HashMap<String, String>(),
+      new PlainCallbackHandler(username, password), underlyingTransport);
+  }
+
+  private PlainSaslHelper() {
+    throw new UnsupportedOperationException("Can't initialize class");
+  }
+
+  private static final class PlainServerCallbackHandler implements CallbackHandler {
 
-  private static class PlainServerCallbackHandler implements CallbackHandler {
     private final AuthMethods authMethod;
-    public PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
+
+    PlainServerCallbackHandler(String authMethodStr) throws AuthenticationException {
       authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
     }
 
     @Override
     public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
-      String userName = null;
-      String passWord = null;
+      String username = null;
+      String password = null;
       AuthorizeCallback ac = null;
 
-      for (int i = 0; i < callbacks.length; i++) {
-        if (callbacks[i] instanceof NameCallback) {
-          NameCallback nc = (NameCallback)callbacks[i];
-          userName = nc.getName();
-        } else if (callbacks[i] instanceof PasswordCallback) {
-          PasswordCallback pc = (PasswordCallback)callbacks[i];
-          passWord = new String(pc.getPassword());
-        } else if (callbacks[i] instanceof AuthorizeCallback) {
-          ac = (AuthorizeCallback) callbacks[i];
+      for (Callback callback : callbacks) {
+        if (callback instanceof NameCallback) {
+          NameCallback nc = (NameCallback) callback;
+          username = nc.getName();
+        } else if (callback instanceof PasswordCallback) {
+          PasswordCallback pc = (PasswordCallback) callback;
+          password = new String(pc.getPassword());
+        } else if (callback instanceof AuthorizeCallback) {
+          ac = (AuthorizeCallback) callback;
         } else {
-          throw new UnsupportedCallbackException(callbacks[i]);
+          throw new UnsupportedCallbackException(callback);
         }
       }
       PasswdAuthenticationProvider provider =
-            AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
-      provider.Authenticate(userName, passWord);
+        AuthenticationProviderFactory.getAuthenticationProvider(authMethod);
+      provider.Authenticate(username, password);
       if (ac != null) {
         ac.setAuthorized(true);
       }
     }
   }
 
-  public static class PlainClientbackHandler implements CallbackHandler {
+  public static class PlainCallbackHandler implements CallbackHandler {
 
-    private final String userName;
-    private final String passWord;
+    private final String username;
+    private final String password;
 
-    public PlainClientbackHandler (String userName, String passWord) {
-      this.userName = userName;
-      this.passWord = passWord;
+    public PlainCallbackHandler(String username, String password) {
+      this.username = username;
+      this.password = password;
     }
 
     @Override
-    public void handle(Callback[] callbacks)
-          throws IOException, UnsupportedCallbackException {
-      AuthorizeCallback ac = null;
-      for (int i = 0; i < callbacks.length; i++) {
-        if (callbacks[i] instanceof NameCallback) {
-          NameCallback nameCallback = (NameCallback)callbacks[i];
-          nameCallback.setName(userName);
-        } else if (callbacks[i] instanceof PasswordCallback) {
-          PasswordCallback passCallback = (PasswordCallback) callbacks[i];
-          passCallback.setPassword(passWord.toCharArray());
+    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+      for (Callback callback : callbacks) {
+        if (callback instanceof NameCallback) {
+          NameCallback nameCallback = (NameCallback) callback;
+          nameCallback.setName(username);
+        } else if (callback instanceof PasswordCallback) {
+          PasswordCallback passCallback = (PasswordCallback) callback;
+          passCallback.setPassword(password.toCharArray());
         } else {
-          throw new UnsupportedCallbackException(callbacks[i]);
+          throw new UnsupportedCallbackException(callback);
         }
       }
     }
   }
 
-  private static class SQLPlainProcessorFactory extends TProcessorFactory {
+  private static final class SQLPlainProcessorFactory extends TProcessorFactory {
+
     private final ThriftCLIService service;
-    private final HiveConf conf;
 
-    public SQLPlainProcessorFactory(ThriftCLIService service) {
+    SQLPlainProcessorFactory(ThriftCLIService service) {
       super(null);
       this.service = service;
-      this.conf = service.getHiveConf();
     }
 
     @Override
@@ -123,33 +151,4 @@ public class PlainSaslHelper {
     }
   }
 
-  public static TProcessorFactory getPlainProcessorFactory(ThriftCLIService service) {
-    return new SQLPlainProcessorFactory(service);
-  }
-
-  // Register Plain SASL server provider
-  static {
-    java.security.Security.addProvider(new SaslPlainProvider());
-  }
-
-  public static TTransportFactory getPlainTransportFactory(String authTypeStr)
-      throws LoginException {
-    TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
-    try {
-      saslFactory.addServerDefinition("PLAIN",
-          authTypeStr, null, new HashMap<String, String>(),
-          new PlainServerCallbackHandler(authTypeStr));
-    } catch (AuthenticationException e) {
-      throw new LoginException ("Error setting callback handler" + e);
-    }
-    return saslFactory;
-  }
-
-  public static TTransport getPlainTransport(String userName, String passwd,
-      final TTransport underlyingTransport) throws SaslException {
-    return new TSaslClientTransport("PLAIN", null,
-        null, null, new HashMap<String, String>(),
-        new PlainClientbackHandler(userName, passwd), underlyingTransport);
-  }
-
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/PlainSaslServer.java Tue Sep  2 19:56:56 2014
@@ -18,10 +18,10 @@
 package org.apache.hive.service.auth;
 
 import java.io.IOException;
+import java.security.Provider;
 import java.util.ArrayDeque;
 import java.util.Deque;
 import java.util.Map;
-
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
 import javax.security.auth.callback.NameCallback;
@@ -35,27 +35,26 @@ import javax.security.sasl.SaslServerFac
 import org.apache.hive.service.auth.AuthenticationProviderFactory.AuthMethods;
 
 /**
- *
- * PlainSaslServer.
- * Sun JDK only provides PLAIN client and not server. This class implements the Plain SASL server
- * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt)
+ * Sun JDK only provides a PLAIN client and no server. This class implements the Plain SASL server
+ * conforming to RFC #4616 (http://www.ietf.org/rfc/rfc4616.txt).
  */
-public class PlainSaslServer implements SaslServer  {
-  private final AuthMethods authMethod;
+public class PlainSaslServer implements SaslServer {
+
+  public static final String PLAIN_METHOD = "PLAIN";
   private String user;
-  private String passwd;
-  private String authzId;
   private final CallbackHandler handler;
 
   PlainSaslServer(CallbackHandler handler, String authMethodStr) throws SaslException {
     this.handler = handler;
-    this.authMethod = AuthMethods.getValidAuthMethod(authMethodStr);
+    AuthMethods.getValidAuthMethod(authMethodStr);
   }
 
+  @Override
   public String getMechanismName() {
-    return "PLAIN";
+    return PLAIN_METHOD;
   }
 
+  @Override
   public byte[] evaluateResponse(byte[] response) throws SaslException {
     try {
       // parse the response
@@ -68,28 +67,29 @@ public class PlainSaslServer implements 
           tokenList.addLast(messageToken.toString());
           messageToken = new StringBuilder();
         } else {
-          messageToken.append((char)b);
+          messageToken.append((char) b);
         }
       }
       tokenList.addLast(messageToken.toString());
 
       // validate response
-      if ((tokenList.size() < 2) || (tokenList.size() > 3)) {
+      if (tokenList.size() < 2 || tokenList.size() > 3) {
         throw new SaslException("Invalid message format");
       }
-      passwd = tokenList.removeLast();
+      String passwd = tokenList.removeLast();
       user = tokenList.removeLast();
       // optional authzid
-      if (!tokenList.isEmpty()) {
-        authzId = tokenList.removeLast();
-      } else {
+      String authzId;
+      if (tokenList.isEmpty()) {
         authzId = user;
+      } else {
+        authzId = tokenList.removeLast();
       }
       if (user == null || user.isEmpty()) {
-        throw new SaslException("No user name provide");
+        throw new SaslException("No user name provided");
       }
       if (passwd == null || passwd.isEmpty()) {
-        throw new SaslException("No password name provide");
+        throw new SaslException("No password name provided");
       }
 
       NameCallback nameCallback = new NameCallback("User");
@@ -98,7 +98,7 @@ public class PlainSaslServer implements 
       pcCallback.setPassword(passwd.toCharArray());
       AuthorizeCallback acCallback = new AuthorizeCallback(user, authzId);
 
-      Callback[] cbList = new Callback[] {nameCallback, pcCallback, acCallback};
+      Callback[] cbList = {nameCallback, pcCallback, acCallback};
       handler.handle(cbList);
       if (!acCallback.isAuthorized()) {
         throw new SaslException("Authentication failed");
@@ -113,49 +113,62 @@ public class PlainSaslServer implements 
     return null;
   }
 
+  @Override
   public boolean isComplete() {
     return user != null;
   }
 
+  @Override
   public String getAuthorizationID() {
     return user;
   }
 
+  @Override
   public byte[] unwrap(byte[] incoming, int offset, int len) {
-      throw new UnsupportedOperationException();
+    throw new UnsupportedOperationException();
   }
 
+  @Override
   public byte[] wrap(byte[] outgoing, int offset, int len) {
     throw new UnsupportedOperationException();
   }
 
+  @Override
   public Object getNegotiatedProperty(String propName) {
     return null;
   }
 
+  @Override
   public void dispose() {}
 
   public static class SaslPlainServerFactory implements SaslServerFactory {
 
-    public SaslServer createSaslServer(
-      String mechanism, String protocol, String serverName, Map<String,?> props, CallbackHandler cbh)
-    {
-      if ("PLAIN".equals(mechanism)) {
+    @Override
+    public SaslServer createSaslServer(String mechanism, String protocol, String serverName,
+      Map<String, ?> props, CallbackHandler cbh) {
+      if (PLAIN_METHOD.equals(mechanism)) {
         try {
           return new PlainSaslServer(cbh, protocol);
         } catch (SaslException e) {
+          /* This is to fulfill the contract of the interface which states that an exception shall
+             be thrown when a SaslServer cannot be created due to an error but null should be
+             returned when a Server can't be created due to the parameters supplied. And the only
+             thing PlainSaslServer can fail on is a non-supported authentication mechanism.
+             That's why we return null instead of throwing the Exception */
           return null;
         }
       }
       return null;
     }
 
+    @Override
     public String[] getMechanismNames(Map<String, ?> props) {
-      return new String[] { "PLAIN" };
+      return new String[] {PLAIN_METHOD};
     }
   }
 
-  public static class SaslPlainProvider extends java.security.Provider {
+  public static class SaslPlainProvider extends Provider {
+
     public SaslPlainProvider() {
       super("HiveSaslPlain", 1.0, "Hive Plain SASL provider");
       put("SaslServerFactory.PLAIN", SaslPlainServerFactory.class.getName());

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/SaslQOP.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/SaslQOP.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/SaslQOP.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/SaslQOP.java Tue Sep  2 19:56:56 2014
@@ -22,7 +22,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 /**
- * Possible values of  SASL quality-of-protection value.
+ * Possible values of SASL quality-of-protection value.
  */
 public enum SaslQOP {
   AUTH("auth"), // Authentication only.
@@ -32,14 +32,15 @@ public enum SaslQOP {
 
   public final String saslQop;
 
-  private static final Map<String, SaslQOP> strToEnum
-          = new HashMap<String, SaslQOP>();
+  private static final Map<String, SaslQOP> STR_TO_ENUM = new HashMap<String, SaslQOP>();
+
   static {
-    for (SaslQOP SaslQOP : values())
-      strToEnum.put(SaslQOP.toString(), SaslQOP);
+    for (SaslQOP saslQop : values()) {
+      STR_TO_ENUM.put(saslQop.toString(), saslQop);
+    }
   }
 
-  private SaslQOP(final String saslQop) {
+  SaslQOP(String saslQop) {
     this.saslQop = saslQop;
   }
 
@@ -48,13 +49,13 @@ public enum SaslQOP {
   }
 
   public static SaslQOP fromString(String str) {
-    if(str != null) {
+    if (str != null) {
       str = str.toLowerCase();
     }
-    SaslQOP saslQOP = strToEnum.get(str);
-    if(saslQOP == null) {
-      throw new IllegalArgumentException("Unknown auth type: " + str + " Allowed values are: "
-              + strToEnum.keySet());
+    SaslQOP saslQOP = STR_TO_ENUM.get(str);
+    if (saslQOP == null) {
+      throw new IllegalArgumentException(
+        "Unknown auth type: " + str + " Allowed values are: " + STR_TO_ENUM.keySet());
     }
     return saslQOP;
   }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSetIpAddressProcessor.java Tue Sep  2 19:56:56 2014
@@ -34,10 +34,12 @@ import org.slf4j.LoggerFactory;
  * This class is responsible for setting the ipAddress for operations executed via HiveServer2.
  * <p>
  * <ul>
- * <li>Ipaddress is only set for operations that calls listeners with hookContext @see ExecuteWithHookContext.</li>
- * <li>Ipaddress is only set if the underlying transport mechanism is socket. </li>
+ * <li>IP address is only set for operations that calls listeners with hookContext</li>
+ * <li>IP address is only set if the underlying transport mechanism is socket</li>
  * </ul>
  * </p>
+ *
+ * @see org.apache.hadoop.hive.ql.hooks.ExecuteWithHookContext
  */
 public class TSetIpAddressProcessor<I extends Iface> extends TCLIService.Processor<Iface> {
 
@@ -54,26 +56,26 @@ public class TSetIpAddressProcessor<I ex
     try {
       return super.process(in, out);
     } finally {
-      threadLocalUserName.remove();
-      threadLocalIpAddress.remove();
+      THREAD_LOCAL_USER_NAME.remove();
+      THREAD_LOCAL_IP_ADDRESS.remove();
     }
   }
 
   private void setUserName(final TProtocol in) {
     TTransport transport = in.getTransport();
     if (transport instanceof TSaslServerTransport) {
-      String userName = ((TSaslServerTransport)transport).getSaslServer().getAuthorizationID();
-      threadLocalUserName.set(userName);
+      String userName = ((TSaslServerTransport) transport).getSaslServer().getAuthorizationID();
+      THREAD_LOCAL_USER_NAME.set(userName);
     }
   }
 
   protected void setIpAddress(final TProtocol in) {
     TTransport transport = in.getTransport();
     TSocket tSocket = getUnderlyingSocketFromTransport(transport);
-    if (tSocket != null) {
-      threadLocalIpAddress.set(tSocket.getSocket().getInetAddress().toString());
-    } else {
+    if (tSocket == null) {
       LOGGER.warn("Unknown Transport, cannot determine ipAddress");
+    } else {
+      THREAD_LOCAL_IP_ADDRESS.set(tSocket.getSocket().getInetAddress().toString());
     }
   }
 
@@ -92,14 +94,14 @@ public class TSetIpAddressProcessor<I ex
     return null;
   }
 
-  private static ThreadLocal<String> threadLocalIpAddress = new ThreadLocal<String>() {
+  private static final ThreadLocal<String> THREAD_LOCAL_IP_ADDRESS = new ThreadLocal<String>() {
     @Override
     protected synchronized String initialValue() {
       return null;
     }
   };
 
-  private static ThreadLocal<String> threadLocalUserName = new ThreadLocal<String>(){
+  private static final ThreadLocal<String> THREAD_LOCAL_USER_NAME = new ThreadLocal<String>() {
     @Override
     protected synchronized String initialValue() {
       return null;
@@ -107,10 +109,10 @@ public class TSetIpAddressProcessor<I ex
   };
 
   public static String getUserIpAddress() {
-    return threadLocalIpAddress.get();
+    return THREAD_LOCAL_IP_ADDRESS.get();
   }
 
   public static String getUserName() {
-    return threadLocalUserName.get();
+    return THREAD_LOCAL_USER_NAME.get();
   }
-}
\ No newline at end of file
+}

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/auth/TSubjectAssumingTransport.java Tue Sep  2 19:56:56 2014
@@ -22,7 +22,6 @@ import java.security.AccessControlContex
 import java.security.AccessController;
 import java.security.PrivilegedActionException;
 import java.security.PrivilegedExceptionAction;
-
 import javax.security.auth.Subject;
 
 import org.apache.hadoop.hive.thrift.TFilterTransport;
@@ -30,43 +29,42 @@ import org.apache.thrift.transport.TTran
 import org.apache.thrift.transport.TTransportException;
 
 /**
-  *
-  * This is used on the client side, where the API explicitly opens a transport to
-  * the server using the Subject.doAs()
-  */
- public class TSubjectAssumingTransport extends TFilterTransport {
-
-   public TSubjectAssumingTransport(TTransport wrapped) {
-     super(wrapped);
-   }
-
-   @Override
-   public void open() throws TTransportException {
-     try {
-       AccessControlContext context = AccessController.getContext();
-       Subject subject = Subject.getSubject(context);
-       Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
-         public Void run() {
-           try {
-             wrapped.open();
-           } catch (TTransportException tte) {
-             // Wrap the transport exception in an RTE, since Subject.doAs() then goes
-             // and unwraps this for us out of the doAs block. We then unwrap one
-             // more time in our catch clause to get back the TTE. (ugh)
-             throw new RuntimeException(tte);
-           }
-           return null;
-         }
-       });
-     } catch (PrivilegedActionException ioe) {
-       throw new RuntimeException("Received an ioe we never threw!", ioe);
-     } catch (RuntimeException rte) {
-       if (rte.getCause() instanceof TTransportException) {
-         throw (TTransportException)rte.getCause();
-       } else {
-         throw rte;
-       }
-     }
-   }
+ * This is used on the client side, where the API explicitly opens a transport to
+ * the server using the Subject.doAs().
+ */
+public class TSubjectAssumingTransport extends TFilterTransport {
+
+  public TSubjectAssumingTransport(TTransport wrapped) {
+    super(wrapped);
+  }
+
+  @Override
+  public void open() throws TTransportException {
+    try {
+      AccessControlContext context = AccessController.getContext();
+      Subject subject = Subject.getSubject(context);
+      Subject.doAs(subject, new PrivilegedExceptionAction<Void>() {
+        public Void run() {
+          try {
+            wrapped.open();
+          } catch (TTransportException tte) {
+            // Wrap the transport exception in an RTE, since Subject.doAs() then goes
+            // and unwraps this for us out of the doAs block. We then unwrap one
+            // more time in our catch clause to get back the TTE. (ugh)
+            throw new RuntimeException(tte);
+          }
+          return null;
+        }
+      });
+    } catch (PrivilegedActionException ioe) {
+      throw new RuntimeException("Received an ioe we never threw!", ioe);
+    } catch (RuntimeException rte) {
+      if (rte.getCause() instanceof TTransportException) {
+        throw (TTransportException) rte.getCause();
+      } else {
+        throw rte;
+      }
+    }
+  }
 
- }
+}

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIService.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIService.java Tue Sep  2 19:56:56 2014
@@ -46,7 +46,6 @@ import org.apache.hive.service.Composite
 import org.apache.hive.service.ServiceException;
 import org.apache.hive.service.auth.HiveAuthFactory;
 import org.apache.hive.service.cli.operation.Operation;
-import org.apache.hive.service.cli.session.HiveSession;
 import org.apache.hive.service.cli.session.SessionManager;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
 
@@ -67,7 +66,6 @@ public class CLIService extends Composit
 
   private HiveConf hiveConf;
   private SessionManager sessionManager;
-  private IMetaStoreClient metastoreClient;
   private UserGroupInformation serviceUGI;
   private UserGroupInformation httpUGI;
 
@@ -80,11 +78,8 @@ public class CLIService extends Composit
     this.hiveConf = hiveConf;
     sessionManager = new SessionManager();
     addService(sessionManager);
-    /**
-     * If auth mode is Kerberos, do a kerberos login for the service from the keytab
-     */
-    if (hiveConf.getVar(ConfVars.HIVE_SERVER2_AUTHENTICATION).equalsIgnoreCase(
-        HiveAuthFactory.AuthTypes.KERBEROS.toString())) {
+    //  If the hadoop cluster is secure, do a kerberos login for the service from the keytab
+    if (ShimLoader.getHadoopShims().isSecurityEnabled()) {
       try {
         HiveAuthFactory.loginFromKeytab(hiveConf);
         this.serviceUGI = ShimLoader.getHadoopShims().getUGIForConf(hiveConf);
@@ -132,21 +127,23 @@ public class CLIService extends Composit
     } catch (IOException eIO) {
       throw new ServiceException("Error setting stage directories", eIO);
     }
-
+    // Initialize and test a connection to the metastore
+    IMetaStoreClient metastoreClient = null;
     try {
-      // Initialize and test a connection to the metastore
       metastoreClient = new HiveMetaStoreClient(hiveConf);
       metastoreClient.getDatabases("default");
     } catch (Exception e) {
       throw new ServiceException("Unable to connect to MetaStore!", e);
     }
+    finally {
+      if (metastoreClient != null) {
+        metastoreClient.close();
+      }
+    }
   }
 
   @Override
   public synchronized void stop() {
-    if (metastoreClient != null) {
-      metastoreClient.close();
-    }
     super.stop();
   }
 
@@ -170,7 +167,7 @@ public class CLIService extends Composit
           throws HiveSQLException {
     SessionHandle sessionHandle = sessionManager.openSession(protocol, username, password, null, configuration,
         true, delegationToken);
-    LOG.debug(sessionHandle + ": openSession()");
+    LOG.debug(sessionHandle + ": openSessionWithImpersonation()");
     return sessionHandle;
   }
 
@@ -365,8 +362,9 @@ public class CLIService extends Composit
      * However, if the background operation is complete, we return immediately.
      */
     if (operation.shouldRunAsync()) {
-      long timeout = operation.getParentSession().getHiveConf().getLongVar(
-          HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT);
+      HiveConf conf = operation.getParentSession().getHiveConf();
+      long timeout = HiveConf.getTimeVar(conf,
+          HiveConf.ConfVars.HIVE_SERVER2_LONG_POLLING_TIMEOUT, TimeUnit.MILLISECONDS);
       try {
         operation.getBackgroundHandle().get(timeout, TimeUnit.MILLISECONDS);
       } catch (TimeoutException e) {
@@ -377,7 +375,7 @@ public class CLIService extends Composit
         LOG.trace(opHandle + ": The background operation was cancelled", e);
       } catch (ExecutionException e) {
         // The background operation thread was aborted
-        LOG.trace(opHandle + ": The background operation was aborted", e);
+        LOG.warn(opHandle + ": The background operation was aborted", e);
       } catch (InterruptedException e) {
         // No op, this thread was interrupted
         // In this case, the call might return sooner than long polling timeout
@@ -423,25 +421,20 @@ public class CLIService extends Composit
   }
 
   /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
+   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
    */
   @Override
-  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
+  public RowSet fetchResults(OperationHandle opHandle)
       throws HiveSQLException {
-    RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle)
-        .getParentSession().fetchResults(opHandle, orientation, maxRows);
-    LOG.debug(opHandle + ": fetchResults()");
-    return rowSet;
+    return fetchResults(opHandle, Operation.DEFAULT_FETCH_ORIENTATION,
+        Operation.DEFAULT_FETCH_MAX_ROWS, FetchType.QUERY_OUTPUT);
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
-   */
   @Override
-  public RowSet fetchResults(OperationHandle opHandle)
-      throws HiveSQLException {
+  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+                             long maxRows, FetchType fetchType) throws HiveSQLException {
     RowSet rowSet = sessionManager.getOperationManager().getOperation(opHandle)
-        .getParentSession().fetchResults(opHandle);
+        .getParentSession().fetchResults(opHandle, orientation, maxRows, fetchType);
     LOG.debug(opHandle + ": fetchResults()");
     return rowSet;
   }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/CLIServiceClient.java Tue Sep  2 19:56:56 2014
@@ -28,19 +28,17 @@ import org.apache.hive.service.auth.Hive
  *
  */
 public abstract class CLIServiceClient implements ICLIService {
+  private static final long DEFAULT_MAX_ROWS = 1000;
 
   public SessionHandle openSession(String username, String password)
       throws HiveSQLException {
     return openSession(username, password, Collections.<String, String>emptyMap());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.ICLIService#fetchResults(org.apache.hive.service.cli.OperationHandle)
-   */
   @Override
   public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException {
     // TODO: provide STATIC default value
-    return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, 1000);
+    return fetchResults(opHandle, FetchOrientation.FETCH_NEXT, DEFAULT_MAX_ROWS, FetchType.QUERY_OUTPUT);
   }
 
   @Override

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/EmbeddedCLIServiceClient.java Tue Sep  2 19:56:56 2014
@@ -181,13 +181,10 @@ public class EmbeddedCLIServiceClient ex
     return cliService.getResultSetMetadata(opHandle);
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.CLIServiceClient#fetchResults(org.apache.hive.service.cli.OperationHandle, org.apache.hive.service.cli.FetchOrientation, long)
-   */
   @Override
-  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
-      throws HiveSQLException {
-    return cliService.fetchResults(opHandle, orientation, maxRows);
+  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+      long maxRows,  FetchType fetchType) throws HiveSQLException {
+    return cliService.fetchResults(opHandle, orientation, maxRows, fetchType);
   }
 
 

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/ICLIService.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/ICLIService.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/ICLIService.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/ICLIService.java Tue Sep  2 19:56:56 2014
@@ -27,79 +27,78 @@ import org.apache.hive.service.auth.Hive
 
 public interface ICLIService {
 
-  public abstract SessionHandle openSession(String username, String password,
+  SessionHandle openSession(String username, String password,
       Map<String, String> configuration)
           throws HiveSQLException;
 
-  public abstract SessionHandle openSessionWithImpersonation(String username, String password,
+  SessionHandle openSessionWithImpersonation(String username, String password,
       Map<String, String> configuration, String delegationToken)
           throws HiveSQLException;
 
-  public abstract void closeSession(SessionHandle sessionHandle)
+  void closeSession(SessionHandle sessionHandle)
       throws HiveSQLException;
 
-  public abstract GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType)
+  GetInfoValue getInfo(SessionHandle sessionHandle, GetInfoType infoType)
       throws HiveSQLException;
 
-  public abstract OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
+  OperationHandle executeStatement(SessionHandle sessionHandle, String statement,
       Map<String, String> confOverlay)
           throws HiveSQLException;
 
-  public abstract OperationHandle executeStatementAsync(SessionHandle sessionHandle,
+  OperationHandle executeStatementAsync(SessionHandle sessionHandle,
       String statement, Map<String, String> confOverlay)
           throws HiveSQLException;
 
-  public abstract OperationHandle getTypeInfo(SessionHandle sessionHandle)
+  OperationHandle getTypeInfo(SessionHandle sessionHandle)
       throws HiveSQLException;
 
-  public abstract OperationHandle getCatalogs(SessionHandle sessionHandle)
+  OperationHandle getCatalogs(SessionHandle sessionHandle)
       throws HiveSQLException;
 
-  public abstract OperationHandle getSchemas(SessionHandle sessionHandle,
+  OperationHandle getSchemas(SessionHandle sessionHandle,
       String catalogName, String schemaName)
           throws HiveSQLException;
 
-  public abstract OperationHandle getTables(SessionHandle sessionHandle,
+  OperationHandle getTables(SessionHandle sessionHandle,
       String catalogName, String schemaName, String tableName, List<String> tableTypes)
           throws HiveSQLException;
 
-  public abstract OperationHandle getTableTypes(SessionHandle sessionHandle)
+  OperationHandle getTableTypes(SessionHandle sessionHandle)
       throws HiveSQLException;
 
-  public abstract OperationHandle getColumns(SessionHandle sessionHandle,
+  OperationHandle getColumns(SessionHandle sessionHandle,
       String catalogName, String schemaName, String tableName, String columnName)
           throws HiveSQLException;
 
-  public abstract OperationHandle getFunctions(SessionHandle sessionHandle,
+  OperationHandle getFunctions(SessionHandle sessionHandle,
       String catalogName, String schemaName, String functionName)
           throws HiveSQLException;
 
-  public abstract OperationStatus getOperationStatus(OperationHandle opHandle)
+  OperationStatus getOperationStatus(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public abstract void cancelOperation(OperationHandle opHandle)
+  void cancelOperation(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public abstract void closeOperation(OperationHandle opHandle)
+  void closeOperation(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public abstract TableSchema getResultSetMetadata(OperationHandle opHandle)
+  TableSchema getResultSetMetadata(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public abstract RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
-      long maxRows)
-          throws HiveSQLException;
-
-  public abstract RowSet fetchResults(OperationHandle opHandle)
+  RowSet fetchResults(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public abstract String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+  RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+      long maxRows, FetchType fetchType) throws HiveSQLException;
+
+  String getDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
       String owner, String renewer) throws HiveSQLException;
 
-  public abstract void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+  void cancelDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
       String tokenStr) throws HiveSQLException;
 
-  public abstract void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
+  void renewDelegationToken(SessionHandle sessionHandle, HiveAuthFactory authFactory,
       String tokenStr) throws HiveSQLException;
 
 

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/OperationState.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/OperationState.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/OperationState.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/OperationState.java Tue Sep  2 19:56:56 2014
@@ -25,29 +25,26 @@ import org.apache.hive.service.cli.thrif
  *
  */
 public enum OperationState {
-  INITIALIZED(TOperationState.INITIALIZED_STATE),
-  RUNNING(TOperationState.RUNNING_STATE),
-  FINISHED(TOperationState.FINISHED_STATE),
-  CANCELED(TOperationState.CANCELED_STATE),
-  CLOSED(TOperationState.CLOSED_STATE),
-  ERROR(TOperationState.ERROR_STATE),
-  UNKNOWN(TOperationState.UKNOWN_STATE),
-  PENDING(TOperationState.PENDING_STATE);
+  INITIALIZED(TOperationState.INITIALIZED_STATE, false),
+  RUNNING(TOperationState.RUNNING_STATE, false),
+  FINISHED(TOperationState.FINISHED_STATE, true),
+  CANCELED(TOperationState.CANCELED_STATE, true),
+  CLOSED(TOperationState.CLOSED_STATE, true),
+  ERROR(TOperationState.ERROR_STATE, true),
+  UNKNOWN(TOperationState.UKNOWN_STATE, false),
+  PENDING(TOperationState.PENDING_STATE, false);
 
   private final TOperationState tOperationState;
+  private final boolean terminal;
 
-  OperationState(TOperationState tOperationState) {
+  OperationState(TOperationState tOperationState, boolean terminal) {
     this.tOperationState = tOperationState;
+    this.terminal = terminal;
   }
 
+  // must be kept in sync with the ordinal order of TOperationState
   public static OperationState getOperationState(TOperationState tOperationState) {
-    // TODO: replace this with a Map?
-    for (OperationState opState : values()) {
-      if (tOperationState.equals(opState.tOperationState)) {
-        return opState;
-      }
-    }
-    return OperationState.UNKNOWN;
+    return OperationState.values()[tOperationState.getValue()];
   }
 
   public static void validateTransition(OperationState oldState,
@@ -91,7 +88,8 @@ public enum OperationState {
     default:
       // fall-through
     }
-    throw new HiveSQLException("Illegal Operation state transition");
+    throw new HiveSQLException("Illegal Operation state transition " +
+        "from " + oldState + " to " + newState);
   }
 
   public void validateTransition(OperationState newState)
@@ -102,4 +100,8 @@ public enum OperationState {
   public TOperationState toTOperationState() {
     return tOperationState;
   }
+
+  public boolean isTerminal() {
+    return terminal;
+  }
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetCatalogsOperation.java Tue Sep  2 19:56:56 2014
@@ -42,11 +42,8 @@ public class GetCatalogsOperation extend
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     setState(OperationState.FINISHED);
   }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetColumnsOperation.java Tue Sep  2 19:56:56 2014
@@ -114,11 +114,8 @@ public class GetColumnsOperation extends
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetFunctionsOperation.java Tue Sep  2 19:56:56 2014
@@ -68,11 +68,8 @@ public class GetFunctionsOperation exten
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       if ((null == catalogName || "".equals(catalogName))

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetSchemasOperation.java Tue Sep  2 19:56:56 2014
@@ -50,11 +50,8 @@ public class GetSchemasOperation extends
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTableTypesOperation.java Tue Sep  2 19:56:56 2014
@@ -50,11 +50,8 @@ public class GetTableTypesOperation exte
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       for (TableType type : TableType.values()) {

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTablesOperation.java Tue Sep  2 19:56:56 2014
@@ -71,11 +71,8 @@ public class GetTablesOperation extends 
     this.rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       IMetaStoreClient metastoreClient = getParentSession().getMetaStoreClient();

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/GetTypeInfoOperation.java Tue Sep  2 19:56:56 2014
@@ -79,11 +79,8 @@ public class GetTypeInfoOperation extend
     rowSet = RowSetFactory.create(RESULT_SET_SCHEMA, getProtocolVersion());
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       for (Type type : Type.values()) {

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/HiveCommandOperation.java Tue Sep  2 19:56:56 2014
@@ -94,11 +94,8 @@ public class HiveCommandOperation extend
     IOUtils.cleanup(LOG, parentSession.getSessionState().err);
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hive.service.cli.operation.Operation#run()
-   */
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.RUNNING);
     try {
       String command = getStatement().trim();
@@ -136,6 +133,7 @@ public class HiveCommandOperation extend
     setState(OperationState.CLOSED);
     tearDownSessionIO();
     cleanTmpFile();
+    cleanupOperationLog();
   }
 
   /* (non-Javadoc)

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/MetadataOperation.java Tue Sep  2 19:56:56 2014
@@ -46,6 +46,7 @@ public abstract class MetadataOperation 
   @Override
   public void close() throws HiveSQLException {
     setState(OperationState.CLOSED);
+    cleanupOperationLog();
   }
 
   /**

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/Operation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/Operation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/Operation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/Operation.java Tue Sep  2 19:56:56 2014
@@ -17,8 +17,11 @@
  */
 package org.apache.hive.service.cli.operation;
 
+import java.io.File;
+import java.io.FileNotFoundException;
 import java.util.EnumSet;
 import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -41,20 +44,28 @@ public abstract class Operation {
   private final OperationHandle opHandle;
   private HiveConf configuration;
   public static final Log LOG = LogFactory.getLog(Operation.class.getName());
+  public static final FetchOrientation DEFAULT_FETCH_ORIENTATION = FetchOrientation.FETCH_NEXT;
   public static final long DEFAULT_FETCH_MAX_ROWS = 100;
   protected boolean hasResultSet;
   protected volatile HiveSQLException operationException;
   protected final boolean runAsync;
   protected volatile Future<?> backgroundHandle;
+  protected OperationLog operationLog;
+  protected boolean isOperationLogEnabled;
+
+  private long operationTimeout;
+  private long lastAccessTime;
 
   protected static final EnumSet<FetchOrientation> DEFAULT_FETCH_ORIENTATION_SET =
       EnumSet.of(FetchOrientation.FETCH_NEXT,FetchOrientation.FETCH_FIRST);
 
   protected Operation(HiveSession parentSession, OperationType opType, boolean runInBackground) {
-    super();
     this.parentSession = parentSession;
     this.runAsync = runInBackground;
     this.opHandle = new OperationHandle(opType, parentSession.getProtocolVersion());
+    lastAccessTime = System.currentTimeMillis();
+    operationTimeout = HiveConf.getTimeVar(parentSession.getHiveConf(),
+        HiveConf.ConfVars.HIVE_SERVER2_IDLE_OPERATION_TIMEOUT, TimeUnit.MILLISECONDS);
   }
 
   public Future<?> getBackgroundHandle() {
@@ -106,12 +117,40 @@ public abstract class Operation {
     opHandle.setHasResultSet(hasResultSet);
   }
 
+  public OperationLog getOperationLog() {
+    return operationLog;
+  }
+
   protected final OperationState setState(OperationState newState) throws HiveSQLException {
     state.validateTransition(newState);
     this.state = newState;
+    this.lastAccessTime = System.currentTimeMillis();
     return this.state;
   }
 
+  public boolean isTimedOut(long current) {
+    if (operationTimeout == 0) {
+      return false;
+    }
+    if (operationTimeout > 0) {
+      // check only when it's in terminal state
+      return state.isTerminal() && lastAccessTime + operationTimeout <= current;
+    }
+    return lastAccessTime + -operationTimeout <= current;
+  }
+
+  public long getLastAccessTime() {
+    return lastAccessTime;
+  }
+
+  public long getOperationTimeout() {
+    return operationTimeout;
+  }
+
+  public void setOperationTimeout(long operationTimeout) {
+    this.operationTimeout = operationTimeout;
+  }
+
   protected void setOperationException(HiveSQLException operationException) {
     this.operationException = operationException;
   }
@@ -120,6 +159,7 @@ public abstract class Operation {
     if (this.state != state) {
       throw new HiveSQLException("Expected state " + state + ", but found " + this.state);
     }
+    this.lastAccessTime = System.currentTimeMillis();
   }
 
   public boolean isRunning() {
@@ -138,7 +178,97 @@ public abstract class Operation {
     return OperationState.ERROR.equals(state);
   }
 
-  public abstract void run() throws HiveSQLException;
+  protected void createOperationLog() {
+    if (parentSession.isOperationLogEnabled()) {
+      File operationLogFile = new File(parentSession.getOperationLogSessionDir(),
+          opHandle.getHandleIdentifier().toString());
+      isOperationLogEnabled = true;
+
+      // create log file
+      try {
+        if (operationLogFile.exists()) {
+          LOG.warn("The operation log file should not exist, but it is already there: " +
+              operationLogFile.getAbsolutePath());
+          operationLogFile.delete();
+        }
+        if (!operationLogFile.createNewFile()) {
+          // the log file already exists and cannot be deleted.
+          // If it can be read/written, keep its contents and use it.
+          if (!operationLogFile.canRead() || !operationLogFile.canWrite()) {
+            LOG.warn("The already existed operation log file cannot be recreated, " +
+                "and it cannot be read or written: " + operationLogFile.getAbsolutePath());
+            isOperationLogEnabled = false;
+            return;
+          }
+        }
+      } catch (Exception e) {
+        LOG.warn("Unable to create operation log file: " + operationLogFile.getAbsolutePath(), e);
+        isOperationLogEnabled = false;
+        return;
+      }
+
+      // create OperationLog object with above log file
+      try {
+        operationLog = new OperationLog(opHandle.toString(), operationLogFile);
+      } catch (FileNotFoundException e) {
+        LOG.warn("Unable to instantiate OperationLog object for operation: " +
+            opHandle, e);
+        isOperationLogEnabled = false;
+        return;
+      }
+
+      // register this operationLog to current thread
+      OperationLog.setCurrentOperationLog(operationLog);
+    }
+  }
+
+  protected void unregisterOperationLog() {
+    if (isOperationLogEnabled) {
+      OperationLog.removeCurrentOperationLog();
+    }
+  }
+
+  /**
+   * Invoked before runInternal().
+   * Set up some preconditions, or configurations.
+   */
+  protected void beforeRun() {
+    createOperationLog();
+  }
+
+  /**
+   * Invoked after runInternal(), even if an exception is thrown in runInternal().
+   * Clean up resources that were set up in beforeRun().
+   */
+  protected void afterRun() {
+    unregisterOperationLog();
+  }
+
+  /**
+   * Implemented by subclass of Operation class to execute specific behaviors.
+   * @throws HiveSQLException
+   */
+  protected abstract void runInternal() throws HiveSQLException;
+
+  public void run() throws HiveSQLException {
+    beforeRun();
+    try {
+      runInternal();
+    } finally {
+      afterRun();
+    }
+  }
+
+  protected void cleanupOperationLog() {
+    if (isOperationLogEnabled) {
+      if (operationLog == null) {
+        LOG.error("Operation [ " + opHandle.getHandleIdentifier() + " ] "
+          + "logging is enabled, but its OperationLog object cannot be found.");
+      } else {
+        operationLog.close();
+      }
+    }
+  }
 
   // TODO: make this abstract and implement in subclasses.
   public void cancel() throws HiveSQLException {

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/OperationManager.java Tue Sep  2 19:56:56 2014
@@ -18,6 +18,8 @@
 
 package org.apache.hive.service.cli.operation;
 
+import java.util.Enumeration;
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -25,22 +27,19 @@ import java.util.Map;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hive.service.AbstractService;
-import org.apache.hive.service.cli.FetchOrientation;
-import org.apache.hive.service.cli.HiveSQLException;
-import org.apache.hive.service.cli.OperationHandle;
-import org.apache.hive.service.cli.OperationState;
-import org.apache.hive.service.cli.OperationStatus;
-import org.apache.hive.service.cli.RowSet;
-import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.*;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.apache.log4j.*;
 
 /**
  * OperationManager.
  *
  */
 public class OperationManager extends AbstractService {
-
+  private static final String DEFAULT_LAYOUT_PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n";
   private final Log LOG = LogFactory.getLog(OperationManager.class.getName());
 
   private HiveConf hiveConf;
@@ -54,7 +53,11 @@ public class OperationManager extends Ab
   @Override
   public synchronized void init(HiveConf hiveConf) {
     this.hiveConf = hiveConf;
-
+    if (hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_LOGGING_OPERATION_ENABLED)) {
+      initOperationLogCapture();
+    } else {
+      LOG.debug("Operation level logging is turned off");
+    }
     super.init(hiveConf);
   }
 
@@ -70,6 +73,30 @@ public class OperationManager extends Ab
     super.stop();
   }
 
+  private void initOperationLogCapture() {
+    // There should be a ConsoleAppender. Copy its Layout.
+    Logger root = Logger.getRootLogger();
+    Layout layout = null;
+
+    Enumeration<?> appenders = root.getAllAppenders();
+    while (appenders.hasMoreElements()) {
+      Appender ap = (Appender) appenders.nextElement();
+      if (ap.getClass().equals(ConsoleAppender.class)) {
+        layout = ap.getLayout();
+        break;
+      }
+    }
+
+    if (layout == null) {
+      layout = new PatternLayout(DEFAULT_LAYOUT_PATTERN);
+      LOG.info("Cannot find a Layout from a ConsoleAppender. Using default Layout pattern.");
+    }
+
+    // Register another Appender (with the same layout) that talks to us.
+    Appender ap = new LogDivertAppender(layout, this);
+    root.addAppender(ap);
+  }
+
   public ExecuteStatementOperation newExecuteStatementOperation(HiveSession parentSession,
       String statement, Map<String, String> confOverlay, boolean runAsync)
           throws HiveSQLException {
@@ -129,15 +156,27 @@ public class OperationManager extends Ab
     return operation;
   }
 
-  public synchronized Operation getOperation(OperationHandle operationHandle)
-      throws HiveSQLException {
-    Operation operation = handleToOperation.get(operationHandle);
+  public Operation getOperation(OperationHandle operationHandle) throws HiveSQLException {
+    Operation operation = getOperationInternal(operationHandle);
     if (operation == null) {
       throw new HiveSQLException("Invalid OperationHandle: " + operationHandle);
     }
     return operation;
   }
 
+  private synchronized Operation getOperationInternal(OperationHandle operationHandle) {
+    return handleToOperation.get(operationHandle);
+  }
+
+  private synchronized Operation removeTimedOutOperation(OperationHandle operationHandle) {
+    Operation operation = handleToOperation.get(operationHandle);
+    if (operation != null && operation.isTimedOut(System.currentTimeMillis())) {
+      handleToOperation.remove(operationHandle);
+      return operation;
+    }
+    return null;
+  }
+
   private synchronized void addOperation(Operation operation) {
     handleToOperation.put(operation.getHandle(), operation);
   }
@@ -191,4 +230,51 @@ public class OperationManager extends Ab
           throws HiveSQLException {
     return getOperation(opHandle).getNextRowSet(orientation, maxRows);
   }
+
+  public RowSet getOperationLogRowSet(OperationHandle opHandle,
+      FetchOrientation orientation, long maxRows)
+          throws HiveSQLException {
+    // get the OperationLog object from the operation
+    OperationLog operationLog = getOperation(opHandle).getOperationLog();
+    if (operationLog == null) {
+      throw new HiveSQLException("Couldn't find log associated with operation handle: " + opHandle);
+    }
+
+    // read logs
+    List<String> logs = operationLog.readOperationLog(orientation, maxRows);
+
+    // convert logs to RowSet
+    TableSchema tableSchema = new TableSchema(getLogSchema());
+    RowSet rowSet = RowSetFactory.create(tableSchema, getOperation(opHandle).getProtocolVersion());
+    for (String log : logs) {
+      rowSet.addRow(new String[] {log});
+    }
+
+    return rowSet;
+  }
+
+  private Schema getLogSchema() {
+    Schema schema = new Schema();
+    FieldSchema fieldSchema = new FieldSchema();
+    fieldSchema.setName("operation_log");
+    fieldSchema.setType("string");
+    schema.addToFieldSchemas(fieldSchema);
+    return schema;
+  }
+
+  public OperationLog getOperationLogByThread() {
+    return OperationLog.getCurrentOperationLog();
+  }
+
+  public List<Operation> removeExpiredOperations(OperationHandle[] handles) {
+    List<Operation> removed = new ArrayList<Operation>();
+    for (OperationHandle handle : handles) {
+      Operation operation = removeTimedOutOperation(handle);
+      if (operation != null) {
+        LOG.warn("Operation " + handle + " is timed-out and will be closed");
+        removed.add(operation);
+      }
+    }
+    return removed;
+  }
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java Tue Sep  2 19:56:56 2014
@@ -60,6 +60,7 @@ import org.apache.hive.service.cli.RowSe
 import org.apache.hive.service.cli.RowSetFactory;
 import org.apache.hive.service.cli.TableSchema;
 import org.apache.hive.service.cli.session.HiveSession;
+import org.apache.hive.service.server.ThreadWithGarbageCleanup;
 
 /**
  * SQLOperation.
@@ -134,7 +135,7 @@ public class SQLOperation extends Execut
     }
   }
 
-  private void runInternal(HiveConf sqlOperationConf) throws HiveSQLException {
+  private void runQuery(HiveConf sqlOperationConf) throws HiveSQLException {
     try {
       // In Hive server mode, we are not able to retry in the FetchTask
       // case, when calling fetch queries since execute() has returned.
@@ -164,50 +165,63 @@ public class SQLOperation extends Execut
   }
 
   @Override
-  public void run() throws HiveSQLException {
+  public void runInternal() throws HiveSQLException {
     setState(OperationState.PENDING);
     final HiveConf opConfig = getConfigForOperation();
     prepare(opConfig);
     if (!shouldRunAsync()) {
-      runInternal(opConfig);
+      runQuery(opConfig);
     } else {
+      // We'll pass ThreadLocals to the background thread from the foreground (handler) thread
       final SessionState parentSessionState = SessionState.get();
-      // current Hive object needs to be set in aysnc thread in case of remote metastore.
-      // The metastore client in Hive is associated with right user
-      final Hive sessionHive = getCurrentHive();
-      // current UGI will get used by metastore when metsatore is in embedded mode
-      // so this needs to get passed to the new async thread
+      // ThreadLocal Hive object needs to be set in background thread.
+      // The metastore client in Hive is associated with right user.
+      final Hive parentHive = getSessionHive();
+      // Current UGI will get used by metastore when metastore is in embedded mode
+      // So this needs to get passed to the new background thread
       final UserGroupInformation currentUGI = getCurrentUGI(opConfig);
-
       // Runnable impl to call runInternal asynchronously,
       // from a different thread
       Runnable backgroundOperation = new Runnable() {
-
         @Override
         public void run() {
           PrivilegedExceptionAction<Object> doAsAction = new PrivilegedExceptionAction<Object>() {
             @Override
             public Object run() throws HiveSQLException {
-
-              // Storing the current Hive object necessary when doAs is enabled
-              // User information is part of the metastore client member in Hive
-              Hive.set(sessionHive);
+              Hive.set(parentHive);
               SessionState.setCurrentSessionState(parentSessionState);
+              // Set current OperationLog in this async thread so the query log keeps being saved.
+              registerCurrentOperationLog();
               try {
-                runInternal(opConfig);
+                runQuery(opConfig);
               } catch (HiveSQLException e) {
                 setOperationException(e);
                 LOG.error("Error running hive query: ", e);
+              } finally {
+                unregisterOperationLog();
               }
               return null;
             }
           };
+
           try {
             ShimLoader.getHadoopShims().doAs(currentUGI, doAsAction);
           } catch (Exception e) {
             setOperationException(new HiveSQLException(e));
             LOG.error("Error running hive query as user : " + currentUGI.getShortUserName(), e);
           }
+          finally {
+            /**
+             * We'll cache the ThreadLocal RawStore object for this background thread for an orderly cleanup
+             * when this thread is garbage collected later.
+             * @see org.apache.hive.service.server.ThreadWithGarbageCleanup#finalize()
+             */
+            if (ThreadWithGarbageCleanup.currentThread() instanceof ThreadWithGarbageCleanup) {
+              ThreadWithGarbageCleanup currentThread =
+                  (ThreadWithGarbageCleanup) ThreadWithGarbageCleanup.currentThread();
+              currentThread.cacheThreadLocalRawStore();
+            }
+          }
         }
       };
       try {
@@ -223,6 +237,12 @@ public class SQLOperation extends Execut
     }
   }
 
+  /**
+   * Returns the current UGI on the stack
+   * @param opConfig
+   * @return UserGroupInformation
+   * @throws HiveSQLException
+   */
   private UserGroupInformation getCurrentUGI(HiveConf opConfig) throws HiveSQLException {
     try {
       return ShimLoader.getHadoopShims().getUGIForConf(opConfig);
@@ -231,11 +251,28 @@ public class SQLOperation extends Execut
     }
   }
 
-  private Hive getCurrentHive() throws HiveSQLException {
+  /**
+   * Returns the ThreadLocal Hive for the current thread
+   * @return Hive
+   * @throws HiveSQLException
+   */
+  private Hive getSessionHive() throws HiveSQLException {
     try {
       return Hive.get();
     } catch (HiveException e) {
-      throw new HiveSQLException("Failed to get current Hive object", e);
+      throw new HiveSQLException("Failed to get ThreadLocal Hive object", e);
+    }
+  }
+
+  private void registerCurrentOperationLog() {
+    if (isOperationLogEnabled) {
+      if (operationLog == null) {
+        LOG.warn("Failed to get current OperationLog object of Operation: " +
+            getHandle().getHandleIdentifier());
+        isOperationLogEnabled = false;
+        return;
+      }
+      OperationLog.setCurrentOperationLog(operationLog);
     }
   }
 
@@ -267,6 +304,7 @@ public class SQLOperation extends Execut
   @Override
   public void close() throws HiveSQLException {
     cleanup(OperationState.CLOSED);
+    cleanupOperationLog();
   }
 
   @Override

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSession.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSession.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSession.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSession.java Tue Sep  2 19:56:56 2014
@@ -23,19 +23,13 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hive.service.auth.HiveAuthFactory;
-import org.apache.hive.service.cli.FetchOrientation;
-import org.apache.hive.service.cli.GetInfoType;
-import org.apache.hive.service.cli.GetInfoValue;
-import org.apache.hive.service.cli.HiveSQLException;
-import org.apache.hive.service.cli.OperationHandle;
-import org.apache.hive.service.cli.RowSet;
-import org.apache.hive.service.cli.TableSchema;
+import org.apache.hive.service.cli.*;
 
 public interface HiveSession extends HiveSessionBase {
 
-  public void open();
+  void open();
 
-  public IMetaStoreClient getMetaStoreClient() throws HiveSQLException;
+  IMetaStoreClient getMetaStoreClient() throws HiveSQLException;
 
   /**
    * getInfo operation handler
@@ -43,7 +37,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public GetInfoValue getInfo(GetInfoType getInfoType) throws HiveSQLException;
+  GetInfoValue getInfo(GetInfoType getInfoType) throws HiveSQLException;
 
   /**
    * execute operation handler
@@ -52,7 +46,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle executeStatement(String statement,
+  OperationHandle executeStatement(String statement,
       Map<String, String> confOverlay) throws HiveSQLException;
 
   /**
@@ -62,7 +56,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle executeStatementAsync(String statement,
+  OperationHandle executeStatementAsync(String statement,
       Map<String, String> confOverlay) throws HiveSQLException;
 
   /**
@@ -70,14 +64,14 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getTypeInfo() throws HiveSQLException;
+  OperationHandle getTypeInfo() throws HiveSQLException;
 
   /**
    * getCatalogs operation handler
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getCatalogs() throws HiveSQLException;
+  OperationHandle getCatalogs() throws HiveSQLException;
 
   /**
    * getSchemas operation handler
@@ -86,7 +80,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getSchemas(String catalogName, String schemaName)
+  OperationHandle getSchemas(String catalogName, String schemaName)
       throws HiveSQLException;
 
   /**
@@ -98,7 +92,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getTables(String catalogName, String schemaName,
+  OperationHandle getTables(String catalogName, String schemaName,
       String tableName, List<String> tableTypes) throws HiveSQLException;
 
   /**
@@ -106,7 +100,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getTableTypes() throws HiveSQLException ;
+  OperationHandle getTableTypes() throws HiveSQLException ;
 
   /**
    * getColumns operation handler
@@ -117,7 +111,7 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getColumns(String catalogName, String schemaName,
+  OperationHandle getColumns(String catalogName, String schemaName,
       String tableName, String columnName)  throws HiveSQLException;
 
   /**
@@ -128,33 +122,33 @@ public interface HiveSession extends Hiv
    * @return
    * @throws HiveSQLException
    */
-  public OperationHandle getFunctions(String catalogName, String schemaName,
+  OperationHandle getFunctions(String catalogName, String schemaName,
       String functionName) throws HiveSQLException;
 
   /**
    * close the session
    * @throws HiveSQLException
    */
-  public void close() throws HiveSQLException;
+  void close() throws HiveSQLException;
 
-  public void cancelOperation(OperationHandle opHandle) throws HiveSQLException;
+  void cancelOperation(OperationHandle opHandle) throws HiveSQLException;
 
-  public void closeOperation(OperationHandle opHandle) throws HiveSQLException;
+  void closeOperation(OperationHandle opHandle) throws HiveSQLException;
 
-  public TableSchema getResultSetMetadata(OperationHandle opHandle)
+  TableSchema getResultSetMetadata(OperationHandle opHandle)
       throws HiveSQLException;
 
-  public RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation, long maxRows)
-      throws HiveSQLException;
-
-  public RowSet fetchResults(OperationHandle opHandle) throws HiveSQLException;
+  RowSet fetchResults(OperationHandle opHandle, FetchOrientation orientation,
+      long maxRows, FetchType fetchType) throws HiveSQLException;
 
-  public String getDelegationToken(HiveAuthFactory authFactory, String owner,
+  String getDelegationToken(HiveAuthFactory authFactory, String owner,
       String renewer) throws HiveSQLException;
 
-  public void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+  void cancelDelegationToken(HiveAuthFactory authFactory, String tokenStr)
       throws HiveSQLException;
 
-  public void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
+  void renewDelegationToken(HiveAuthFactory authFactory, String tokenStr)
       throws HiveSQLException;
+
+  void closeExpiredOperations();
 }

Modified: hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java?rev=1622108&r1=1622107&r2=1622108&view=diff
==============================================================================
--- hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java (original)
+++ hive/branches/tez/service/src/java/org/apache/hive/service/cli/session/HiveSessionBase.java Tue Sep  2 19:56:56 2014
@@ -24,6 +24,7 @@ import org.apache.hive.service.cli.Sessi
 import org.apache.hive.service.cli.operation.OperationManager;
 import org.apache.hive.service.cli.thrift.TProtocolVersion;
 
+import java.io.File;
 import java.util.Map;
 
 /**
@@ -38,40 +39,59 @@ public interface HiveSessionBase {
    * Set the session manager for the session
    * @param sessionManager
    */
-  public void setSessionManager(SessionManager sessionManager);
+  void setSessionManager(SessionManager sessionManager);
 
   /**
    * Get the session manager for the session
    */
-  public SessionManager getSessionManager();
+  SessionManager getSessionManager();
 
   /**
    * Set operation manager for the session
    * @param operationManager
    */
-  public void setOperationManager(OperationManager operationManager);
+  void setOperationManager(OperationManager operationManager);
 
   /**
    * Initialize the session
    * @param sessionConfMap
    */
-  public void initialize(Map<String, String> sessionConfMap) throws Exception;
+  void initialize(Map<String, String> sessionConfMap) throws Exception;
 
-  public SessionHandle getSessionHandle();
+  /**
+   * Check whether operation logging is enabled and session dir is created successfully
+   */
+  boolean isOperationLogEnabled();
+
+  /**
+   * Get the session dir, which is the parent dir of operation logs
+   * @return a file representing the parent directory of operation logs
+   */
+  File getOperationLogSessionDir();
+
+  /**
+   * Set the session dir, which is the parent dir of operation logs
+   * @param operationLogRootDir the parent dir of the session dir
+   */
+  void setOperationLogSessionDir(File operationLogRootDir);
+
+  SessionHandle getSessionHandle();
+
+  String getUsername();
 
-  public String getUsername();
+  String getPassword();
 
-  public String getPassword();
+  HiveConf getHiveConf();
 
-  public HiveConf getHiveConf();
+  SessionState getSessionState();
 
-  public SessionState getSessionState();
+  String getUserName();
 
-  public String getUserName();
+  void setUserName(String userName);
 
-  public void setUserName(String userName);
+  String getIpAddress();
 
-  public String getIpAddress();
+  void setIpAddress(String ipAddress);
 
-  public void setIpAddress(String ipAddress);
+  long getLastAccessTime();
 }



Mime
View raw message