hive-commits mailing list archives

From w...@apache.org
Subject [3/3] hive git commit: HIVE-15622 : Remove HWI component from Hive (Wei Zheng, reviewed by Ashutosh Chauhan)
Date Fri, 20 Jan 2017 00:26:30 GMT
HIVE-15622 : Remove HWI component from Hive (Wei Zheng, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d63256d5
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d63256d5
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d63256d5

Branch: refs/heads/master
Commit: d63256d50c7c064dba6466c565fa62eb08b9d146
Parents: b569b49
Author: Wei Zheng <weiz@apache.org>
Authored: Thu Jan 19 16:26:13 2017 -0800
Committer: Wei Zheng <weiz@apache.org>
Committed: Thu Jan 19 16:26:13 2017 -0800

----------------------------------------------------------------------
 bin/ext/hwi.cmd                                 |  62 --
 bin/ext/hwi.sh                                  |  50 --
 bin/hive.cmd                                    |   3 -
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   6 -
 conf/hive-env.sh.template                       |   4 +-
 hwi/pom.xml                                     | 132 ----
 .../org/apache/hadoop/hive/hwi/HWIAuth.java     | 105 ---
 .../hadoop/hive/hwi/HWIContextListener.java     |  74 --
 .../apache/hadoop/hive/hwi/HWIException.java    |  48 --
 .../org/apache/hadoop/hive/hwi/HWIServer.java   | 146 ----
 .../apache/hadoop/hive/hwi/HWISessionItem.java  | 610 ----------------
 .../hadoop/hive/hwi/HWISessionManager.java      | 222 ------
 .../apache/hadoop/hive/hwi/TestHWIServer.java   |  77 --
 hwi/web/WEB-INF/web.xml                         |  34 -
 hwi/web/authorize.jsp                           |  88 ---
 hwi/web/css/bootstrap.min.css                   | 706 -------------------
 hwi/web/diagnostics.jsp                         |  84 ---
 hwi/web/error_page.jsp                          |  54 --
 hwi/web/img/glyphicons-halflings-white.png      | Bin 4352 -> 0 bytes
 hwi/web/img/glyphicons-halflings.png            | Bin 4352 -> 0 bytes
 hwi/web/index.jsp                               |  48 --
 hwi/web/left_navigation.jsp                     |  31 -
 hwi/web/navbar.jsp                              |  33 -
 hwi/web/session_create.jsp                      |  84 ---
 hwi/web/session_diagnostics.jsp                 |  77 --
 hwi/web/session_history.jsp                     | 125 ----
 hwi/web/session_kill.jsp                        |  65 --
 hwi/web/session_list.jsp                        |  73 --
 hwi/web/session_manage.jsp                      | 197 ------
 hwi/web/session_remove.jsp                      |  67 --
 hwi/web/session_result.jsp                      |  72 --
 hwi/web/show_database.jsp                       |  71 --
 hwi/web/show_databases.jsp                      |  62 --
 hwi/web/show_table.jsp                          | 184 -----
 hwi/web/view_file.jsp                           |  79 ---
 packaging/pom.xml                               |   5 -
 packaging/src/main/assembly/src.xml             |   1 -
 pom.xml                                         |   1 -
 38 files changed, 2 insertions(+), 3778 deletions(-)
----------------------------------------------------------------------
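Before this change, the component removed here was wired into the CLI wrapper by the scripts deleted below: bin/ext/hwi.sh added "hwi" to SERVICE_LIST and launched org.apache.hadoop.hive.hwi.HWIServer, which bound to hive.hwi.listen.host/hive.hwi.listen.port and deployed the HWI war under /hwi. A minimal sketch of the invocation this commit retires, based on the usage string and defaults in the removed script (the ANT_LIB path is just the script's default, shown for illustration):

  # Launch sketch for the removed service, per bin/ext/hwi.sh below.
  # After this commit "hwi" is no longer registered in SERVICE_LIST,
  # so bin/hive will not recognize the service name.
  export ANT_LIB=/opt/ant/lib   # HWI pulled the Ant jars onto HADOOP_CLASSPATH
  hive --service hwi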


http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/bin/ext/hwi.cmd
----------------------------------------------------------------------
diff --git a/bin/ext/hwi.cmd b/bin/ext/hwi.cmd
deleted file mode 100644
index e081bc5..0000000
--- a/bin/ext/hwi.cmd
+++ /dev/null
@@ -1,62 +0,0 @@
-@echo off
-@rem Licensed to the Apache Software Foundation (ASF) under one or more
-@rem contributor license agreements.  See the NOTICE file distributed with
-@rem this work for additional information regarding copyright ownership.
-@rem The ASF licenses this file to You under the Apache License, Version 2.0
-@rem (the "License"); you may not use this file except in compliance with
-@rem the License.  You may obtain a copy of the License at
-@rem
-@rem     http://www.apache.org/licenses/LICENSE-2.0
-@rem
-@rem Unless required by applicable law or agreed to in writing, software
-@rem distributed under the License is distributed on an "AS IS" BASIS,
-@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-@rem See the License for the specific language governing permissions and
-@rem limitations under the License.
-
-set CLASS=org.apache.hadoop.hive.hwi.HWIServer
-pushd %HIVE_LIB%
-for /f %%a IN ('dir /b hive-hwi-*.jar') do (
-	set JAR=%HIVE_LIB%\%%a
-)
-popd
-
-if [%1]==[hwi_help] goto :hwi_help
-
-if [%1]==[hwi_catservice] goto :hwi_catservice
-
-:hwi
-  @rem set the hwi jar and war files
-	pushd %HIVE_LIB%
-	for /f %%a IN ('dir /b hive-hwi-*') do (
-		call :ProcessFileName %%a
-	)
-	popd
-
-  @rem hadoop 20 or newer - skip the aux_jars option and hiveconf
-	call %HIVE_BIN_PATH%\ext\util\execHiveCmd.cmd %CLASS%
-goto :EOF
-
-@rem process the hwi files
-:ProcessFileName
-	set temp=%1
-	set temp=%temp:~-3%
-
-	if %temp%==jar set HWI_JAR_FILE=lib\%1
-
-	if %temp%==war set HWI_WAR_FILE=lib\%1
-goto :EOF
-
-:hwi_help
-  echo "Usage ANT_LIB=XXXX hive --service hwi"
-goto :EOF
-
-:hwi_catservice
-@echo ^<service^>
-@echo   ^<id^>HWI^</id^>
-@echo   ^<name^>HWI^</name^>
-@echo   ^<description^>Hadoop HWI Service^</description^>
-@echo   ^<executable^>%JAVA_HOME%\bin\java^</executable^>
-@echo   ^<arguments^>%JAVA_HEAP_MAX% %HADOOP_OPTS% %AUX_PARAM% -classpath %CLASSPATH% %CLASS% %HIVE_OPTS%^</arguments^>
-@echo ^</service^>
-goto :EOF

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/bin/ext/hwi.sh
----------------------------------------------------------------------
diff --git a/bin/ext/hwi.sh b/bin/ext/hwi.sh
deleted file mode 100644
index f9cd8ec..0000000
--- a/bin/ext/hwi.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-THISSERVICE=hwi
-export SERVICE_LIST="${SERVICE_LIST}${THISSERVICE} "
-
-hwi() {
-
-  if $cygwin; then
-    HIVE_LIB=`cygpath -w "$HIVE_LIB"`
-  fi
-
-  CLASS=org.apache.hadoop.hive.hwi.HWIServer
-  # The ls hack forces the * to be expanded which is required because 
-  # System.getenv doesn't do globbing
-  export HWI_JAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.jar)
-  export HWI_WAR_FILE=$(ls ${HIVE_LIB}/hive-hwi-*.war)
-
-  #hwi requires ant jars
-  if [ "$ANT_LIB" = "" ] ; then
-    ANT_LIB=/opt/ant/lib
-  fi
-  for f in ${ANT_LIB}/*.jar; do
-    if [[ ! -f $f ]]; then
-      continue;
-    fi
-    HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f
-  done
-
-  export HADOOP_CLASSPATH
-  
-  # hadoop 20 or newer - skip the aux_jars option and hiveconf
-  exec $HADOOP jar ${HWI_JAR_FILE} $CLASS $HIVE_OPTS "$@"
-}
-
-hwi_help(){
-  echo "Usage ANT_LIB=XXXX hive --service hwi"	
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/bin/hive.cmd
----------------------------------------------------------------------
diff --git a/bin/hive.cmd b/bin/hive.cmd
index eb20531..a1e4806 100644
--- a/bin/hive.cmd
+++ b/bin/hive.cmd
@@ -343,9 +343,6 @@ goto :EOF
 	set VAR%SERVICE_COUNT%=hiveserver2
 
 	set /a SERVICE_COUNT = %SERVICE_COUNT% + 1
-	set VAR%SERVICE_COUNT%=hwi
-
-	set /a SERVICE_COUNT = %SERVICE_COUNT% + 1
 	set VAR%SERVICE_COUNT%=jar
 
 	set /a SERVICE_COUNT = %SERVICE_COUNT% + 1

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 7ceb322..7f1f91a 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1228,12 +1228,6 @@ public class HiveConf extends Configuration {
         "Read from a binary stream and treat each hive.binary.record.max.length bytes as a record. \n" +
         "The last record before the end of stream can have less than hive.binary.record.max.length bytes"),
 
-    // HWI
-    HIVEHWILISTENHOST("hive.hwi.listen.host", "0.0.0.0", "This is the host address the Hive Web Interface will listen on"),
-    HIVEHWILISTENPORT("hive.hwi.listen.port", "9999", "This is the port the Hive Web Interface will listen on"),
-    HIVEHWIWARFILE("hive.hwi.war.file", "${env:HWI_WAR_FILE}",
-        "This sets the path to the HWI war file, relative to ${HIVE_HOME}. "),
-
     HIVEHADOOPMAXMEM("hive.mapred.local.mem", 0, "mapper/reducer memory in local mode"),
 
     //small table file size

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/conf/hive-env.sh.template
----------------------------------------------------------------------
diff --git a/conf/hive-env.sh.template b/conf/hive-env.sh.template
index d621edd..72e78d5 100644
--- a/conf/hive-env.sh.template
+++ b/conf/hive-env.sh.template
@@ -19,7 +19,7 @@
 # the Hive installation (so that users do not have to set environment variables
 # or set command line parameters to get correct behavior).
 #
-# The hive service being invoked (CLI/HWI etc.) is available via the environment
+# The hive service being invoked (CLI etc.) is available via the environment
 # variable SERVICE
 
 
@@ -41,7 +41,7 @@
 #
 # Larger heap size may be required when running queries over large number of files or partitions. 
 # By default hive shell scripts use a heap size of 256 (MB).  Larger heap size would also be 
-# appropriate for hive server (hwi etc).
+# appropriate for hive server.
 
 
 # Set HADOOP_HOME to point to a specific hadoop install directory

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/pom.xml
----------------------------------------------------------------------
diff --git a/hwi/pom.xml b/hwi/pom.xml
deleted file mode 100644
index 4e27be8..0000000
--- a/hwi/pom.xml
+++ /dev/null
@@ -1,132 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <parent>
-    <groupId>org.apache.hive</groupId>
-    <artifactId>hive</artifactId>
-    <version>2.2.0-SNAPSHOT</version>
-    <relativePath>../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>hive-hwi</artifactId>
-  <packaging>jar</packaging>
-  <name>Hive HWI</name>
-
-  <properties>
-    <hive.path.to.root>..</hive.path.to.root>
-  </properties>
-
-  <dependencies>
-    <!-- dependencies are always listed in sorted order by groupId, artifectId -->
-    <!-- intra-project -->
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-cli</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-common</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-shims</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-    <!-- inter-project -->
-    <dependency>
-      <groupId>org.eclipse.jetty.aggregate</groupId>
-      <artifactId>jetty-all-server</artifactId>
-      <version>${jetty.version}</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${hadoop.version}</version>
-      <optional>true</optional>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-common</artifactId>
-      <version>${hadoop.version}</version>
-      <optional>true</optional>
-      <scope>test</scope>
-             <exclusions>
-            <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commmons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-   </dependency>
-    <!-- test intra-project -->
-    <dependency>
-      <groupId>org.apache.hive</groupId>
-      <artifactId>hive-exec</artifactId>
-      <version>${project.version}</version>
-      <classifier>tests</classifier>
-      <scope>test</scope>
-    </dependency>
-    <!-- test inter-project -->
-    <dependency>
-      <groupId>commons-httpclient</groupId>
-      <artifactId>commons-httpclient</artifactId>
-      <version>${commons-httpclient.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>${junit.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-mapreduce-client-core</artifactId>
-      <version>${hadoop.version}</version>
-      <optional>true</optional>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <sourceDirectory>${basedir}/src/java</sourceDirectory>
-    <testSourceDirectory>${basedir}/src/test</testSourceDirectory>
-    <plugins>
-    <!-- plugins are always listed in sorted order by groupId, artifectId -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-war-plugin</artifactId>
-        <configuration>
-          <warSourceDirectory>${basedir}/web</warSourceDirectory>
-          <packagingExcludes>WEB-INF/lib/*</packagingExcludes>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java
deleted file mode 100644
index d4c1c59..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIAuth.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hwi;
-
-/**
- * Represents an authenticated user. This class is stored in the users session.
- * It is also used as a key for the HiveSessionManager
- */
-public class HWIAuth implements Comparable {
-  private String user;
-  private String[] groups;
-
-  public HWIAuth() {
-
-  }
-
-  public String getUser() {
-    return user;
-  }
-
-  public void setUser(String user) {
-    this.user = user;
-  }
-
-  public String[] getGroups() {
-    return groups;
-  }
-
-  public void setGroups(String[] groups) {
-    this.groups = groups;
-  }
-
-  /**
-   * HWIAuth is used in SortedSets(s) the compartTo method is required.
-   * 
-   * @return chained call to String.compareTo based on user property
-   */
-  public int compareTo(Object obj) {
-    if (obj == null) {
-      return -1;
-    }
-    if (!(obj instanceof HWIAuth)) {
-      return -1;
-    }
-    HWIAuth o = (HWIAuth) obj;
-    return o.getUser().compareTo(user);
-  }
-
-  /**
-   * HWIAuth is used in Map(s) the hashCode method is required.
-   * 
-   * @see java.lang.Object#hashCode()
-   */
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((user == null) ? 0 : user.hashCode());
-    return result;
-  }
-
-  /**
-   * HWIAuth is used in Map(s) the equals method is required.
-   * 
-   * @see java.lang.Object#equals(java.lang.Object)
-   */
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (!(obj instanceof HWIAuth)) {
-      return false;
-    }
-    HWIAuth other = (HWIAuth) obj;
-    if (user == null) {
-      if (other.user != null) {
-        return false;
-      }
-    } else if (!user.equals(other.user)) {
-      return false;
-    }
-    return true;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java
deleted file mode 100644
index 1f5cb79..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIContextListener.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hwi;
-
-import javax.servlet.ServletContext;
-import javax.servlet.ServletContextEvent;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * After getting a contextInitialized event this component starts an instance of
- * the HiveSessionManager.
- * 
- */
-public class HWIContextListener implements javax.servlet.ServletContextListener {
-
-  protected static final Logger l4j = LoggerFactory.getLogger(HWIContextListener.class
-      .getName());
-
-  /**
-   * The Hive Web Interface manages multiple hive sessions. This event is used
-   * to start a Runnable, HiveSessionManager as a thread inside the servlet
-   * container.
-   * 
-   * @param sce
-   *          An event fired by the servlet context on startup
-   */
-  public void contextInitialized(ServletContextEvent sce) {
-    ServletContext sc = sce.getServletContext();
-    HWISessionManager hs = new HWISessionManager();
-    l4j.debug("HWISessionManager created.");
-    Thread t = new Thread(hs);
-    t.start();
-    l4j.debug("HWISessionManager thread started.");
-    sc.setAttribute("hs", hs);
-    l4j.debug("HWISessionManager placed in application context.");
-  }
-
-  /**
-   * When the Hive Web Interface is closing we locate the Runnable
-   * HiveSessionManager and set it's internal goOn variable to false. This
-   * should allow the application to gracefully shutdown.
-   * 
-   * @param sce
-   *          An event fired by the servlet context on context shutdown
-   */
-  public void contextDestroyed(ServletContextEvent sce) {
-    ServletContext sc = sce.getServletContext();
-    HWISessionManager hs = (HWISessionManager) sc.getAttribute("hs");
-    if (hs == null) {
-      l4j.error("HWISessionManager was not found in context");
-    } else {
-      l4j.error("HWISessionManager goOn set to false. Shutting down.");
-      hs.setGoOn(false);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java
deleted file mode 100644
index 76200d7..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIException.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hwi;
-
-/**
- * HWIException.
- *
- */
-public class HWIException extends Exception {
-
-  private static final long serialVersionUID = 1L;
-
-  public HWIException() {
-    super();
-  }
-
-  /** Specify an error String with the Exception. */
-  public HWIException(String arg0) {
-    super(arg0);
-  }
-
-  /** Wrap an Exception in HWIException. */
-  public HWIException(Throwable arg0) {
-    super(arg0);
-  }
-
-  /** Specify an error String and wrap an Exception in HWIException. */
-  public HWIException(String arg0, Throwable arg1) {
-    super(arg0, arg1);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
deleted file mode 100644
index 5680ed9..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWIServer.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hwi;
-
-import java.io.File;
-import java.io.IOException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.shims.JettyShims;
-import org.apache.hadoop.hive.shims.ShimLoader;
-
-/**
- * This is the entry point for HWI. A web server is invoked in the same manner
- * as the hive CLI. Rather then opening a command line session a web server is
- * started and a web application to work with hive is started.
- */
-public class HWIServer {
-  protected static final Logger l4j = LoggerFactory.getLogger(HWIServer.class.getName());
-
-  private JettyShims.Server webServer;
-  private final String[] args;
-
-  /**
-   * 
-   * @param args
-   *          These are the command line arguments. Usually -hiveconf.
-   * @throws java.io.IOException
-   */
-  public HWIServer(String[] args) throws IOException {
-    this.args = args;
-  }
-
-  /**
-   * This method initialized the internal Jetty Servlet Engine. It adds the hwi
-   * context path.
-   * 
-   * @throws java.io.IOException
-   *           Port already in use, bad bind etc.
-   */
-  public void start() throws IOException {
-
-    HiveConf conf = new HiveConf(this.getClass());
-
-    String listen = null;
-    int port = -1;
-
-    listen = conf.getVar(HiveConf.ConfVars.HIVEHWILISTENHOST);
-    port = conf.getIntVar(HiveConf.ConfVars.HIVEHWILISTENPORT);
-
-    if (listen.equals("")) {
-      l4j.warn("hive.hwi.listen.host was not specified defaulting to 0.0.0.0");
-      listen = "0.0.0.0";
-    }
-    if (port == -1) {
-      l4j.warn("hive.hwi.listen.port was not specified defaulting to 9999");
-      port = 9999;
-    }
-
-    String hwiWAR = conf.getVar(HiveConf.ConfVars.HIVEHWIWARFILE);
-    String hivehome = System.getenv().get("HIVE_HOME");
-    File hwiWARFile = new File(hivehome, hwiWAR);
-    if (!hwiWARFile.exists()) {
-      l4j.error("HWI WAR file not found at " + hwiWARFile.toString());
-      System.exit(1);
-    }
-
-    webServer = ShimLoader.getJettyShims().startServer(listen, port);
-    webServer.addWar(hwiWARFile.toString(), "/hwi");
-
-    /*
-     * The command line args may be used by multiple components. Rather by
-     * setting these as a system property we avoid having to specifically pass
-     * them
-     */
-    StringBuilder sb = new StringBuilder();
-    for (String arg : args) {
-      sb.append(arg + " ");
-    }
-    System.setProperty("hwi-args", sb.toString());
-
-    try {
-      while (true) {
-        try {
-          webServer.start();
-          webServer.join();
-          l4j.debug(" HWI Web Server is started.");
-          break;
-        } catch (org.mortbay.util.MultiException ex) {
-          throw ex;
-        }
-      }
-    } catch (IOException ie) {
-      throw ie;
-    } catch (Exception e) {
-      IOException ie = new IOException("Problem starting HWI server");
-      ie.initCause(e);
-      l4j.error("Parsing hwi.listen.port caused exception ", e);
-      throw ie;
-    }
-  }
-
-  /**
-   * 
-   * @param args
-   *          as of now no arguments are supported
-   * @throws java.lang.Exception
-   *           Could be thrown if due to issues with Jetty or bad configuration
-   *           options
-   * 
-   */
-  public static void main(String[] args) throws Exception {
-    HWIServer hwi = new HWIServer(args);
-    l4j.info("HWI is starting up");
-    hwi.start();
-  }
-
-  /**
-   * Shut down the running HWI Server.
-   * 
-   * @throws Exception
-   *           Running Thread.stop() can and probably will throw this
-   */
-  public void stop() throws Exception {
-    l4j.info("HWI is shutting down");
-    webServer.stop();
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
deleted file mode 100644
index f14608c..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionItem.java
+++ /dev/null
@@ -1,610 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.hwi;
-
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.util.ArrayList;
-import java.util.List;
-import java.sql.SQLException;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.cli.CliSessionState;
-import org.apache.hadoop.hive.cli.OptionsProcessor;
-import org.apache.hadoop.hive.common.LogUtils;
-import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.CommandNeedRetryException;
-import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.history.HiveHistoryViewer;
-import org.apache.hadoop.hive.ql.processors.CommandProcessor;
-import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
-import org.apache.hadoop.hive.ql.session.SessionState;
-
-/**
- * HWISessionItem can be viewed as a wrapper for a Hive shell. With it the user
- * has a session on the web server rather then in a console window.
- *
- */
-public class HWISessionItem implements Runnable, Comparable<HWISessionItem> {
-
-  protected static final Logger l4j = LoggerFactory.getLogger(HWISessionItem.class
-      .getName());
-
-  /** Represents the state a session item can be in. */
-  public enum WebSessionItemStatus {
-    NEW, READY, QUERY_SET, QUERY_RUNNING, DESTROY, KILL_QUERY
-  };
-
-  /** The Web Interface sessionName this is used to identify the session. */
-  private final String sessionName;
-
-  /**
-   * Respresents the current status of the session. Used by components to
-   * determine state. Operations will throw exceptions if the item is not in the
-   * correct state.
-   */
-  private HWISessionItem.WebSessionItemStatus status;
-
-  private CliSessionState ss;
-
-  /** Standard out from the session will be written to this local file. */
-  private String resultFile;
-
-  /** Standard error from the session will be written to this local file. */
-  private String errorFile;
-
-  /**
-   * The results from the Driver. This is used for storing the most result
-   * results from the driver in memory.
-   */
-  private ArrayList<ArrayList<String>> resultBucket;
-
-  /** Limits the resultBucket to be no greater then this size. */
-  private int resultBucketMaxSize;
-
-  /** List of queries that this item should/has operated on. */
-  private List<String> queries;
-
-  /** status code results of queries. */
-  private List<Integer> queryRet;
-
-  /** Reference to the configuration. */
-  private HiveConf conf;
-
-  /** User privileges. */
-  private HWIAuth auth;
-
-  public Thread runnable;
-
-  /**
-   * Threading SessionState issues require us to capture a reference to the hive
-   * history file and store it.
-   */
-  private String historyFile;
-
-  /**
-   * Creates an instance of WebSessionItem, sets status to NEW.
-   */
-  public HWISessionItem(HWIAuth auth, String sessionName) {
-    this.auth = auth;
-    this.sessionName = sessionName;
-    l4j.debug("HWISessionItem created");
-    status = WebSessionItemStatus.NEW;
-    queries = new ArrayList<String>();
-    queryRet = new ArrayList<Integer>();
-    resultBucket = new ArrayList<ArrayList<String>>();
-    resultBucketMaxSize = 1000;
-    runnable = new Thread(this);
-    runnable.start();
-
-    l4j.debug("Wait for NEW->READY transition");
-    synchronized (runnable) {
-      if (status != WebSessionItemStatus.READY) {
-        try {
-          runnable.wait();
-        } catch (Exception ex) {
-        }
-      }
-    }
-    l4j.debug("NEW->READY transition complete");
-  }
-
-  /**
-   * This is the initialization process that is carried out for each
-   * SessionItem. The goal is to emulate the startup of CLIDriver.
-   */
-  private void itemInit() {
-    l4j.debug("HWISessionItem itemInit start " + getSessionName());
-    OptionsProcessor oproc = new OptionsProcessor();
-
-    if (System.getProperty("hwi-args") != null) {
-      String[] parts = System.getProperty("hwi-args").split("\\s+");
-      if (!oproc.process_stage1(parts)) {
-      }
-    }
-
-    try {
-      LogUtils.initHiveLog4j();
-    } catch (LogInitializationException e) {
-      l4j.warn("Initialization Error", e);
-    }
-    conf = new HiveConf(SessionState.class);
-    ss = new CliSessionState(conf);
-    SessionState.start(ss);
-    queries.add("set hadoop.job.ugi=" + auth.getUser() + ","
-        + auth.getGroups()[0]);
-    queries.add("set user.name=" + auth.getUser());
-    /*
-     * HiveHistoryFileName will not be accessible outside this thread. We must
-     * capture this now.
-     */
-    historyFile = SessionState.get().getHiveHistory().getHistFileName();
-    l4j.debug("HWISessionItem itemInit Complete " + getSessionName());
-    status = WebSessionItemStatus.READY;
-
-    synchronized (runnable) {
-      runnable.notifyAll();
-    }
-  }
-
-  /**
-   * HWISessionItem is a Runnable instance. Calling this method will change the
-   * status to QUERY_SET and notify(). The run method detects this and then
-   * continues processing.
-   */
-  public void clientStart() throws HWIException {
-    if (status == WebSessionItemStatus.QUERY_RUNNING) {
-      throw new HWIException("Query already running");
-    }
-    status = WebSessionItemStatus.QUERY_SET;
-    synchronized (runnable) {
-      runnable.notifyAll();
-    }
-    l4j.debug(getSessionName() + " Query is set to start");
-  }
-
-  public void clientKill() throws HWIException {
-    if (status != WebSessionItemStatus.QUERY_RUNNING) {
-      throw new HWIException("Can not kill that which is not running.");
-    }
-    status = WebSessionItemStatus.KILL_QUERY;
-    l4j.debug(getSessionName() + " Query is set to KILL_QUERY");
-  }
-
-  /** This method clears the private member variables. */
-  public void clientRenew() throws HWIException {
-    throwIfRunning();
-    queries = new ArrayList<String>();
-    queryRet = new ArrayList<Integer>();
-    resultBucket = new ArrayList<ArrayList<String>>();
-    resultFile = null;
-    errorFile = null;
-    // this.conf = null;
-    // this.ss = null;
-    status = WebSessionItemStatus.NEW;
-    l4j.debug(getSessionName() + " Query is renewed to start");
-  }
-
-  /**
-   * This is a callback style function used by the HiveSessionManager. The
-   * HiveSessionManager notices this and attempts to stop the query.
-   */
-  protected void killIt() {
-    l4j.debug(getSessionName() + " Attempting kill.");
-    if (runnable != null) {
-      try {
-        runnable.join(1000);
-        l4j.debug(getSessionName() + " Thread join complete");
-      } catch (InterruptedException e) {
-        l4j.error(getSessionName() + " killing session caused exception ", e);
-      }
-    }
-  }
-
-  /**
-   * Helper function to get configuration variables.
-   *
-   * @param wanted
-   *          a ConfVar
-   * @return Value of the configuration variable.
-   */
-  public String getHiveConfVar(HiveConf.ConfVars wanted) throws HWIException {
-    String result = null;
-    try {
-      result = ss.getConf().getVar(wanted);
-    } catch (Exception ex) {
-      throw new HWIException(ex);
-    }
-    return result;
-  }
-
-  public String getHiveConfVar(String s) throws HWIException {
-    String result = null;
-    try {
-      result = conf.get(s);
-    } catch (Exception ex) {
-      throw new HWIException(ex);
-    }
-    return result;
-  }
-
-  /*
-   * mapred.job.tracker could be host:port or just local
-   * mapred.job.tracker.http.address could be host:port or just host In some
-   * configurations http.address is set to 0.0.0.0 we are combining the two
-   * variables to provide a url to the job tracker WUI if it exists. If hadoop
-   * chose the first available port for the JobTracker HTTP port will can not
-   * determine it.
-   */
-  public String getJobTrackerURL(String jobid) throws HWIException {
-    String jt = this.getHiveConfVar("mapred.job.tracker");
-    String jth = this.getHiveConfVar("mapred.job.tracker.http.address");
-    String[] jtparts = null;
-    String[] jthttpParts = null;
-    if (jt.equalsIgnoreCase("local")) {
-      jtparts = new String[2];
-      jtparts[0] = "local";
-      jtparts[1] = "";
-    } else {
-      jtparts = jt.split(":");
-    }
-    if (jth.contains(":")) {
-      jthttpParts = jth.split(":");
-    } else {
-      jthttpParts = new String[2];
-      jthttpParts[0] = jth;
-      jthttpParts[1] = "";
-    }
-    return jtparts[0] + ":" + jthttpParts[1] + "/jobdetails.jsp?jobid=" + jobid
-        + "&refresh=30";
-  }
-
-  @Override
-  /*
-   * HWISessionItem uses a wait() notify() system. If the thread detects conf to
-   * be null, control is transfered to initItem(). A status of QUERY_SET causes
-   * control to transfer to the runQuery() method. DESTROY will cause the run
-   * loop to end permanently.
-   */
-  public void run() {
-    synchronized (runnable) {
-      while (status != HWISessionItem.WebSessionItemStatus.DESTROY) {
-        if (status == WebSessionItemStatus.NEW) {
-          itemInit();
-        }
-
-        if (status == WebSessionItemStatus.QUERY_SET) {
-          runQuery();
-        }
-
-        try {
-          runnable.wait();
-        } catch (InterruptedException e) {
-          l4j.error("in wait() state ", e);
-        }
-      } // end while
-    } // end sync
-  } // end run
-
-  /**
-   * runQuery iterates the list of queries executing each query.
-   */
-  public void runQuery() {
-    FileOutputStream fos = null;
-    if (getResultFile() != null) {
-      try {
-        fos = new FileOutputStream(new File(resultFile));
-        ss.out = new PrintStream(fos, true, "UTF-8");
-      } catch (java.io.FileNotFoundException fex) {
-        l4j.error(getSessionName() + " opening resultfile " + resultFile, fex);
-      } catch (java.io.UnsupportedEncodingException uex) {
-        l4j.error(getSessionName() + " opening resultfile " + resultFile, uex);
-      }
-    } else {
-      l4j.debug(getSessionName() + " Output file was not specified");
-    }
-    l4j.debug(getSessionName() + " state is now QUERY_RUNNING.");
-    status = WebSessionItemStatus.QUERY_RUNNING;
-
-    // expect one return per query
-    queryRet = new ArrayList<Integer>(queries.size());
-    for (int i = 0; i < queries.size(); i++) {
-      String cmd = queries.get(i);
-      String cmd_trimmed = cmd.trim();
-      String[] tokens = cmd_trimmed.split("\\s+");
-      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
-      CommandProcessor proc = null;
-      try {
-        proc = CommandProcessorFactory.get(tokens[0]);
-      } catch (SQLException e) {
-        l4j.error(getSessionName() + " error processing " + cmd, e);
-      }
-      if (proc != null) {
-        if (proc instanceof Driver) {
-          Driver qp = (Driver) proc;
-          qp.setTryCount(Integer.MAX_VALUE);
-          try {
-          queryRet.add(Integer.valueOf(qp.run(cmd).getResponseCode()));
-          ArrayList<String> res = new ArrayList<String>();
-          try {
-            while (qp.getResults(res)) {
-              ArrayList<String> resCopy = new ArrayList<String>();
-              resCopy.addAll(res);
-              resultBucket.add(resCopy);
-              if (resultBucket.size() > resultBucketMaxSize) {
-                resultBucket.remove(0);
-              }
-              for (String row : res) {
-                if (ss != null) {
-                  if (ss.out != null) {
-                    ss.out.println(row);
-                  }
-                } else {
-                  throw new RuntimeException("ss was null");
-                }
-              }
-              res.clear();
-            }
-
-          } catch (IOException ex) {
-            l4j.error(getSessionName() + " getting results " + getResultFile()
-                + " caused exception.", ex);
-          }
-          } catch (CommandNeedRetryException e) {
-            // this should never happen since we Driver.setTryCount(Integer.MAX_VALUE)
-            l4j.error(getSessionName() + " Exception when executing", e);
-          } finally {
-            qp.close();
-          }
-        } else {
-          try {
-            queryRet.add(Integer.valueOf(proc.run(cmd_1).getResponseCode()));
-          } catch (CommandNeedRetryException e) {
-            // this should never happen if there is no bug
-            l4j.error(getSessionName() + " Exception when executing", e);
-          }
-        }
-      } else {
-        // processor was null
-        l4j.error(getSessionName()
-            + " query processor was not found for query " + cmd);
-      }
-    } // end for
-
-    // cleanup
-    try {
-      if (fos != null) {
-        fos.close();
-      }
-    } catch (IOException ex) {
-      l4j.error(getSessionName() + " closing result file " + getResultFile()
-          + " caused exception.", ex);
-    }
-    status = WebSessionItemStatus.READY;
-    l4j.debug(getSessionName() + " state is now READY");
-    synchronized (runnable) {
-      runnable.notifyAll();
-    }
-  }
-
-  /**
-   * This is a chained call to SessionState.setIsSilent(). Use this if you do
-   * not want the result file to have information status
-   */
-  public void setSSIsSilent(boolean silent) throws HWIException {
-    if (ss == null) {
-      throw new HWIException("Session State is null");
-    }
-    ss.setIsSilent(silent);
-  }
-
-  /**
-   * This is a chained call to SessionState.getIsSilent().
-   */
-  public boolean getSSIsSilent() throws HWIException {
-    if (ss == null) {
-      throw new HWIException("Session State is null");
-    }
-    return ss.getIsSilent();
-  }
-
-  /** to support sorting/Set. */
-  public int compareTo(HWISessionItem other) {
-    if (other == null) {
-      return -1;
-    }
-    return getSessionName().compareTo(other.getSessionName());
-  }
-
-  /**
-   *
-   * @return the HiveHistoryViewer for the session
-   * @throws HWIException
-   */
-  public HiveHistoryViewer getHistoryViewer() throws HWIException {
-    if (ss == null) {
-      throw new HWIException("Session state was null");
-    }
-    /*
-     * we can not call this.ss.get().getHiveHistory().getHistFileName() directly
-     * as this call is made from a a Jetty thread and will return null
-     */
-    HiveHistoryViewer hv = new HiveHistoryViewer(historyFile);
-    return hv;
-  }
-
-  /**
-   * Uses the sessionName property to compare to sessions.
-   *
-   * @return true if sessionNames are equal false otherwise
-   */
-  @Override
-  public boolean equals(Object other) {
-    if (other == null) {
-      return false;
-    }
-    if (!(other instanceof HWISessionItem)) {
-      return false;
-    }
-    HWISessionItem o = (HWISessionItem) other;
-    if (getSessionName().equals(o.getSessionName())) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  public String getResultFile() {
-    return resultFile;
-  }
-
-  public void setResultFile(String resultFile) {
-    this.resultFile = resultFile;
-  }
-
-  /**
-   * The session name is an identifier to recognize the session.
-   *
-   * @return the session's name
-   */
-  public String getSessionName() {
-    return sessionName;
-  }
-
-  /**
-   * Used to represent to the user and other components what state the
-   * HWISessionItem is in. Certain commands can only be run when the application
-   * is in certain states.
-   *
-   * @return the current status of the session
-   */
-  public WebSessionItemStatus getStatus() {
-    return status;
-  }
-
-  /**
-   * Currently unused.
-   *
-   * @return a String with the full path to the error file.
-   */
-  public String getErrorFile() {
-    return errorFile;
-  }
-
-  /**
-   * Currently unused.
-   *
-   * @param errorFile
-   *          the full path to the file for results.
-   */
-  public void setErrorFile(String errorFile) {
-    this.errorFile = errorFile;
-  }
-
-  /**
-   * @return the auth
-   */
-  public HWIAuth getAuth() {
-    return auth;
-  }
-
-  /**
-   * @param auth
-   *          the auth to set
-   */
-  protected void setAuth(HWIAuth auth) {
-    this.auth = auth;
-  }
-
-  /** Returns an unmodifiable list of queries. */
-  public List<String> getQueries() {
-    return java.util.Collections.unmodifiableList(queries);
-  }
-
-  /**
-   * Adds a new query to the execution list.
-   *
-   * @param query
-   *          query to be added to the list
-   */
-  public void addQuery(String query) throws HWIException {
-    throwIfRunning();
-    queries.add(query);
-  }
-
-  /**
-   * Removes a query from the execution list.
-   *
-   * @param item
-   *          the 0 based index of the item to be removed
-   */
-  public void removeQuery(int item) throws HWIException {
-    throwIfRunning();
-    queries.remove(item);
-  }
-
-  public void clearQueries() throws HWIException {
-    throwIfRunning();
-    queries.clear();
-  }
-
-  /** returns the value for resultBucketMaxSize. */
-  public int getResultBucketMaxSize() {
-    return resultBucketMaxSize;
-  }
-
-  /**
-   * sets the value for resultBucketMaxSize.
-   *
-   * @param size
-   *          the new size
-   */
-  public void setResultBucketMaxSize(int size) {
-    resultBucketMaxSize = size;
-  }
-
-  /** gets the value for resultBucket. */
-  public ArrayList<ArrayList<String>> getResultBucket() {
-    return resultBucket;
-  }
-
-  /**
-   * The HWISessionItem stores the result of each query in an array.
-   *
-   * @return unmodifiable list of return codes
-   */
-  public List<Integer> getQueryRet() {
-    return java.util.Collections.unmodifiableList(queryRet);
-  }
-
-  /**
-   * If the ItemStatus is QueryRunning most of the configuration is in a read
-   * only state.
-   */
-  private void throwIfRunning() throws HWIException {
-    if (status == WebSessionItemStatus.QUERY_RUNNING) {
-      throw new HWIException("Query already running");
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
----------------------------------------------------------------------
diff --git a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java b/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
deleted file mode 100644
index d6030ec..0000000
--- a/hwi/src/java/org/apache/hadoop/hive/hwi/HWISessionManager.java
+++ /dev/null
@@ -1,222 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.hwi;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.TreeSet;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * HiveSessionManager is a Runnable started inside a web application context.
- * It's basic function is to hold a collection of SessionItem(s). It also works
- * as a facade, as jsp clients can not create a Hive Session directly. Hive
- * Sessions are long lived, unlike a traditional Query and Block system clients
- * set up the query to be started with an instance of this class.
- * 
- */
-public class HWISessionManager implements Runnable {
-
-  protected static final Logger l4j = LoggerFactory.getLogger(HWISessionManager.class
-      .getName());
-
-  private boolean goOn;
-  private TreeMap<HWIAuth, Set<HWISessionItem>> items;
-
-  protected HWISessionManager() {
-    goOn = true;
-    items = new TreeMap<HWIAuth, Set<HWISessionItem>>();
-  }
-
-  /**
-   * This method scans the SessionItem collection. If a SessionItem is in the
-   * QUERY_SET state that signals that its thread should be started. If the
-   * SessionItem is in the DESTROY state it should be cleaned up and removed
-   * from the collection. Currently we are using a sleep. A wait/notify could be
-   * implemented. Queries will run for a long time, a one second wait on start
-   * will not be noticed.
-   * 
-   */
-  public void run() {
-    l4j.debug("Entered run() thread has started");
-    while (goOn) {
-      l4j.debug("locking items");
-      synchronized (items) {
-
-        for (HWIAuth a : items.keySet()) {
-          for (HWISessionItem i : items.get(a)) {
-            if (i.getStatus() == HWISessionItem.WebSessionItemStatus.DESTROY) {
-              items.get(a).remove(i);
-            }
-            if (i.getStatus() == HWISessionItem.WebSessionItemStatus.KILL_QUERY) {
-              l4j.debug("Killing item: " + i.getSessionName());
-              i.killIt();
-              l4j.debug("Killed item: " + i.getSessionName());
-              items.get(a).remove(i);
-            }
-          }
-        }
-
-      } // end sync
-      try {
-        Thread.sleep(100);
-      } catch (InterruptedException ex) {
-        l4j.error("Could not sleep ", ex);
-      }
-    } // end while
-    l4j.debug("goOn is false. Loop has ended.");
-    // Cleanup used here to stop all threads
-    synchronized (items) {
-      for (HWIAuth a : items.keySet()) {
-        for (HWISessionItem i : items.get(a)) {
-          try {
-            if (i.getStatus() == HWISessionItem.WebSessionItemStatus.QUERY_RUNNING) {
-              l4j.debug(i.getSessionName() + "Joining ");
-              i.runnable.join(1000);
-              l4j.debug(i.getSessionName() + "Joined ");
-            }
-          } catch (InterruptedException ex) {
-            l4j.error(i.getSessionName() + "while joining ", ex);
-          }
-        }
-      }
-    }
-  } // end run
-
-  protected boolean isGoOn() {
-    return goOn;
-  }
-
-  protected void setGoOn(boolean goOn) {
-    this.goOn = goOn;
-  }
-
-  protected TreeMap<HWIAuth, Set<HWISessionItem>> getItems() {
-    return items;
-  }
-
-  protected void setItems(TreeMap<HWIAuth, Set<HWISessionItem>> items) {
-    this.items = items;
-  }
-
-  // client methods called from JSP
-  /**
-   * Rather then return the actual items we return a list copies. This enforces
-   * our HWISessionManager by preventing the ability of the client(jsp) to
-   * create SessionItems.
-   * 
-   * @return A set of SessionItems this framework manages
-   */
-  public ArrayList<HWISessionItem> findAllSessionItems() {
-    ArrayList<HWISessionItem> otherItems = new ArrayList<HWISessionItem>();
-    for (HWIAuth a : items.keySet()) {
-      otherItems.addAll(items.get(a));
-    }
-    return otherItems;
-  }
-
-  /**
-   * Here we handle creating the SessionItem, we do this for the JSP client
-   * because we need to set parameters the client is not aware of. One such
-   * parameter is the command line arguments the server was started with.
-   * 
-   * @param a
-   *          Authenticated user
-   * @param sessionName
-   *          Represents the session name
-   * @return a new SessionItem or null if a session with that name already
-   *         exists
-   */
-  public HWISessionItem createSession(HWIAuth a, String sessionName) {
-
-    l4j.debug("Creating session: " + sessionName);
-
-    HWISessionItem si = null;
-
-    synchronized (items) {
-      if (findSessionItemByName(a, sessionName) == null) {
-        l4j.debug("Initializing session: " + sessionName + " a for "
-            + a.getUser());
-        si = new HWISessionItem(a, sessionName);
-
-        if (!items.containsKey(a)) {
-          l4j.debug("SessionList is empty " + a.getUser());
-          TreeSet<HWISessionItem> list = new TreeSet<HWISessionItem>();
-          list.add(si);
-          items.put(a, list);
-          l4j.debug("Item added " + si.getSessionName() + " for user "
-              + a.getUser());
-        } else {
-          items.get(a).add(si);
-          l4j.debug("Item added " + si.getSessionName() + " for user "
-              + a.getUser());
-        }
-
-      } else {
-        l4j.debug("Creating session: " + sessionName + " already exists "
-            + a.getUser());
-      }
-    }
-    return si;
-  }
-
-  /**
-   * Helper method useful when you know the session name you wish to reference.
-   * 
-   * @param sessionname
-   * @return A SessionItem matching the sessionname or null if it does not
-   *         exists
-   */
-  public HWISessionItem findSessionItemByName(HWIAuth auth, String sessionname) {
-    Collection<HWISessionItem> sessForUser = items.get(auth);
-    if (sessForUser == null) {
-      return null;
-    }
-    for (HWISessionItem si : sessForUser) {
-      if (si.getSessionName().equals(sessionname)) {
-        return si;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Used to list all users that have at least one session.
-   * 
-   * @return keySet of items all users that have any sessions
-   */
-  public Set<HWIAuth> findAllUsersWithSessions() {
-    return items.keySet();
-  }
-
-  /**
-   * Used to list all the sessions of a user.
-   * 
-   * @param auth
-   *          the user being enquired about
-   * @return all the sessions of that user
-   */
-  public Set<HWISessionItem> findAllSessionsForUser(HWIAuth auth) {
-    return items.get(auth);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java
----------------------------------------------------------------------
diff --git a/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java b/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java
deleted file mode 100644
index 8c99eef..0000000
--- a/hwi/src/test/org/apache/hadoop/hive/hwi/TestHWIServer.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.hwi;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Properties;
-
-import junit.framework.TestCase;
-
-import org.apache.hadoop.hive.shims.JettyShims;
-import org.apache.hadoop.hive.shims.ShimLoader;
-
-/**
- * TestHWIServer.
- *
- */
-public class TestHWIServer extends TestCase {
-
-  public TestHWIServer(String name) {
-    super(name);
-
-  }
-
-  @Override
-  protected void setUp() throws Exception {
-    super.setUp();
-
-  }
-
-  @Override
-  protected void tearDown() throws Exception {
-    super.tearDown();
-
-  }
-
-  public final void testServerInit() throws Exception {
-    StringBuilder warFile = new StringBuilder("../build/hwi/hive-hwi-");
-    Properties props = new Properties();
-
-    // try retrieve version from build.properties file
-    try {
-      props.load(new FileInputStream("../build.properties"));
-      warFile.append(props.getProperty("version")).append(".war");
-    } catch (FileNotFoundException e) {
-      e.printStackTrace();
-    } catch (IOException e) {
-      e.printStackTrace();
-    }
-
-    JettyShims.Server webServer;
-    webServer = ShimLoader.getJettyShims().startServer("0.0.0.0", 9999);
-    assertNotNull(webServer);
-    webServer.addWar(warFile.toString(), "/hwi");
-    webServer.start();
-    // webServer.join();
-    webServer.stop();
-    assert (true);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/web/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/hwi/web/WEB-INF/web.xml b/hwi/web/WEB-INF/web.xml
deleted file mode 100644
index a480b29..0000000
--- a/hwi/web/WEB-INF/web.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-  <web-app version="2.5" xmlns="http://java.sun.com/xml/ns/javaee" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://java.sun.com/xml/ns/javaee http://java.sun.com/xml/ns/javaee/web-app_2_5.xsd">  
-  
-  <listener>
-    <description>Used to manage Hive Sessions</description>
-    <listener-class>org.apache.hadoop.hive.hwi.HWIContextListener</listener-class>
-  </listener> 
-  
-  <session-config>
-    <session-timeout>
-      30
-    </session-timeout>
-  </session-config>
-  <welcome-file-list>
-    <welcome-file>index.jsp</welcome-file>
-  </welcome-file-list>
-</web-app>

http://git-wip-us.apache.org/repos/asf/hive/blob/d63256d5/hwi/web/authorize.jsp
----------------------------------------------------------------------
diff --git a/hwi/web/authorize.jsp b/hwi/web/authorize.jsp
deleted file mode 100644
index e29f4d4..0000000
--- a/hwi/web/authorize.jsp
+++ /dev/null
@@ -1,88 +0,0 @@
-<%--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
---%>
-<!DOCTYPE html>
-<%@ page import="org.apache.hadoop.hive.hwi.*"%>
-<%@page errorPage="error_page.jsp"%>
-<%
-	HWIAuth auth = (HWIAuth) session.getAttribute("auth");
-	if (auth == null) {
-		auth = new HWIAuth();
-		auth.setUser("");
-		auth.setGroups(new String[] { "" });
-		session.setAttribute("auth", auth);
-	}
-%>
-<%
-	String user = request.getParameter("user");
-	String groups = request.getParameter("groups");
-	if (user != null) {
-		auth.setUser(user);
-		auth.setGroups(groups.split("\\s+"));
-		session.setAttribute("auth", auth);
-	}
-%>
-<html>
-<head>
-<title>Authorize</title>
-<link href="css/bootstrap.min.css" rel="stylesheet">
-</head>
-<body style="padding-top: 60px;">
-    <jsp:include page="/navbar.jsp"></jsp:include>
-	<div class="container">
-		<div class="row">
-			<div class="span4">
-				<jsp:include page="/left_navigation.jsp" />
-			</div>
-			<div class="span8">
-
-				<%
-					if (request.getParameter("user") != null) {
-				%>
-				<div class="alert alert-success">
-					<p>Authorization is complete.</p>
-				</div>
-				<%
-					}
-				%>
-				<form action="authorize.jsp" class="form-horizontal">
-					<fieldset>
-					    <legend>Change User Info</legend>
-						<div class="control-group">
-							<label class="control-label" for="flduser">User</label>
-							<div class="controls">
-								<input id="flduser" type="text" name="user"
-									value="<%=auth.getUser()%>">
-							</div>
-						</div>
-
-						<div class="control-group">
-							<label class="control-label" for="fldgroups">Groups</label>
-							<div class="controls">
-								<input id="fldgroups" type="text" name="groups"
-									value="<% for (String group : auth.getGroups()) { out.print(group); } %>">
-							</div>
-						</div>
-					</fieldset>
-					<div class="form-actions">
-						<button type="submit" class="btn btn-primary">Submit</button>
-					</div>
-				</form>
-			</div><!-- span8 -->
-		</div><!-- row -->
-	</div><!-- container -->
-</body>
-</html>

