ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dbhowm...@apache.org
Subject [01/21] ambari git commit: AMBARI-16963: JDBC implementation of hive view. (dipayanb)
Date Tue, 31 May 2016 19:45:22 GMT
Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 184e7e29d -> f3df02522


http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/main/resources/view.log4j.properties
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/view.log4j.properties b/contrib/views/hive-next/src/main/resources/view.log4j.properties
new file mode 100644
index 0000000..b1bd5f2
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/view.log4j.properties
@@ -0,0 +1,31 @@
+# Copyright 2011 The Apache Software Foundation
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+log4j.appender.hiveNextView=org.apache.log4j.RollingFileAppender
+log4j.appender.hiveNextView.File=/var/log/ambari-server/hive-next-view/hive-view.log
+log4j.appender.hiveNextView.MaxFileSize=80MB
+log4j.appender.hiveNextView.MaxBackupIndex=60
+log4j.appender.hiveNextView.layout=org.apache.log4j.PatternLayout
+log4j.appender.hiveNextView.layout.ConversionPattern=%d{DATE} %5p [%t] %c{1}:%L - %m%n
+
+log4j.logger.org.apache.ambari.view.hive2=INFO,hiveNextView
+log4j.logger.org.apache.hive.jdbc=INFO,hiveNextView
+log4j.logger.akka.actor=INFO,hiveNextView
+log4j.additivity.org.apache.ambari.view.hive2 = false
+log4j.additivity.org.apache.hive.jdbc = false
+log4j.additivity.akka.actor = false

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/main/resources/view.xml b/contrib/views/hive-next/src/main/resources/view.xml
new file mode 100644
index 0000000..c2d2dc5
--- /dev/null
+++ b/contrib/views/hive-next/src/main/resources/view.xml
@@ -0,0 +1,298 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<view>
+    <name>HIVE</name>
+    <label>Hive</label>
+    <version>2.0.0</version>
+    <build>${env.BUILD_NUMBER}</build>
+
+    <min-ambari-version>2.0.*</min-ambari-version>
+
+    <validator-class>org.apache.ambari.view.hive2.PropertyValidator</validator-class>
+    <view-class>org.apache.ambari.view.hive2.HiveViewImpl</view-class>
+
+    <!-- Hive Configs -->
+    <parameter>
+      <name>hive.jdbc.url</name>
+      <description>Enter JDBC Url to connect to Hive Server 2</description>
+      <label>HiveServer2 JDBC Url</label>
+      <placeholder>jdbc:hive2://127.0.0.1:10000</placeholder>
+      <cluster-config>fake</cluster-config>
+      <required>true</required>
+    </parameter>
+
+    <parameter>
+      <name>hive.session.params</name>
+      <description>Semicolon-separated key value parameters to be used in JDBC URL
generation to connect to hive server 2</description>
+      <label>Hive Session Parameters</label>
+      <placeholder>transportMode=http;httpPath=cliservice</placeholder>
+      <default-value></default-value>
+      <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.metastore.warehouse.dir</name>
+        <description>Hive Metastore directory (example: /apps/hive/warehouse)</description>
+        <label>Hive Metastore directory</label>
+        <placeholder>/apps/hive/warehouse</placeholder>
+        <default-value>/apps/hive/warehouse</default-value>
+        <cluster-config>hive-site/hive.metastore.warehouse.dir</cluster-config>
+        <required>false</required>
+    </parameter>
+
+    <!-- HDFS Configs -->
+    <parameter>
+        <name>webhdfs.url</name>
+        <description>Enter the WebHDFS FileSystem URI. Typically this is the dfs.namenode.http-address
+            property in the hdfs-site.xml configuration. URL must be accessible from Ambari
Server.</description>
+        <label>WebHDFS FileSystem URI</label>
+        <placeholder>webhdfs://namenode:50070</placeholder>
+        <required>true</required>
+        <cluster-config>core-site/fs.defaultFS</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.nameservices</name>
+        <description>Comma-separated list of nameservices. Value of hdfs-site/dfs.nameservices
property</description>
+        <label>Logical name of the NameNode cluster</label>
+        <required>false</required>
+        <cluster-config>hdfs-site/dfs.nameservices</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenodes.list</name>
+        <description>Comma-separated list of namenodes for a given nameservice.
+          Value of hdfs-site/dfs.ha.namenodes.[nameservice] property</description>
+        <label>List of NameNodes</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn1</name>
+        <description>RPC address for first name node.
+          Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.rpc-address.nn2</name>
+        <description>RPC address for second name node.
+          Value of hdfs-site/dfs.namenode.rpc-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode RPC Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn1</name>
+        <description>WebHDFS address for first name node.
+          Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode1] property</description>
+        <label>First NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.ha.namenode.http-address.nn2</name>
+        <description>WebHDFS address for second name node.
+          Value of hdfs-site/dfs.namenode.http-address.[nameservice].[namenode2] property</description>
+        <label>Second NameNode HTTP (WebHDFS) Address</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+    <parameter>
+        <name>webhdfs.client.failover.proxy.provider</name>
+        <description>The Java class that HDFS clients use to contact the Active NameNode
+          Value of hdfs-site/dfs.client.failover.proxy.provider.[nameservice] property</description>
+        <label>Failover Proxy Provider</label>
+        <required>false</required>
+        <cluster-config>fake</cluster-config>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.username</name>
+        <description>doAs for proxy user for HDFS. By default, uses the currently logged-in
Ambari user.</description>
+        <label>WebHDFS Username</label>
+        <default-value>${username}</default-value>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>webhdfs.auth</name>
+        <description>Semicolon-separated authentication configs.</description>
+        <label>WebHDFS Authentication</label>
+        <placeholder>auth=SIMPLE</placeholder>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>hdfs.umask-mode</name>
+        <description>The umask used when creating files and directories. Defaults to
022</description>
+        <label>Umask</label>
+        <default-value>022</default-value>
+        <required>false</required>
+        <cluster-config>hdfs-site/fs.permissions.umask-mode</cluster-config>
+    </parameter>
+
+    <parameter>
+        <name>hdfs.auth_to_local</name>
+        <description>Auth to Local Configuration</description>
+        <label>Auth To Local</label>
+        <required>false</required>
+        <cluster-config>core-site/hadoop.security.auth_to_local</cluster-config>
+    </parameter>
+
+    <!-- General Configs -->
+
+    <parameter>
+        <name>views.tez.instance</name>
+        <description>Instance name of Tez view.</description>
+        <label>Instance name of Tez view</label>
+        <required>false</required>
+    </parameter>
+
+    <parameter>
+        <name>scripts.dir</name>
+        <description>HDFS directory path to store Hive scripts.</description>
+        <label>Scripts HDFS Directory</label>
+        <placeholder>/user/${username}/hive/scripts</placeholder>
+        <default-value>/user/${username}/hive/scripts</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>jobs.dir</name>
+        <description>HDFS directory path to store Hive job status.</description>
+        <label>Jobs HDFS Directory</label>
+        <placeholder>/user/${username}/hive/jobs</placeholder>
+        <default-value>/user/${username}/hive/jobs</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>scripts.settings.defaults-file</name>
+        <description>File path for saving default settings for query</description>
+        <label>Default script settings file</label>
+        <default-value>/user/${username}/.${instanceName}.defaultSettings</default-value>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>yarn.ats.url</name>
+        <description>The URL to the YARN Application Timeline Server, used to provide
Jobs information, typically, this is the yarn.timeline-service.webapp.address property in
the yarn-site.xml configuration.</description>
+        <label>YARN Application Timeline Server URL</label>
+        <placeholder>http://yarn.ats.address:8188</placeholder>
+        <cluster-config>yarn-site/yarn.timeline-service.webapp.address</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>yarn.resourcemanager.url</name>
+        <description>The URL to the YARN ResourceManager, used to provide YARN Application
data. If YARN ResourceManager HA is enabled, provide a comma separated list of URLs for all
the Resource Managers.</description>
+        <label>YARN ResourceManager URL</label>
+        <placeholder>http://yarn.resourcemanager.address:8088</placeholder>
+        <cluster-config>yarn-site/yarn.resourcemanager.webapp.address</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <resource>
+        <name>savedQuery</name>
+        <plural-name>savedQueries</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive2.resources.savedQueries.SavedQuery</resource-class>
+        <provider-class>org.apache.ambari.view.hive2.resources.savedQueries.SavedQueryResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive2.resources.savedQueries.SavedQueryService</service-class>
+    </resource>
+
+    <resource>
+        <name>fileResource</name>
+        <plural-name>fileResources</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive2.resources.resources.FileResourceItem</resource-class>
+        <provider-class>org.apache.ambari.view.hive2.resources.resources.FileResourceResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive2.resources.resources.FileResourceService</service-class>
+    </resource>
+
+    <resource>
+        <name>udf</name>
+        <plural-name>udfs</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive2.resources.udfs.UDF</resource-class>
+        <provider-class>org.apache.ambari.view.hive2.resources.udfs.UDFResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive2.resources.udfs.UDFService</service-class>
+    </resource>
+
+    <resource>
+        <name>job</name>
+        <plural-name>jobs</plural-name>
+        <id-property>id</id-property>
+        <resource-class>org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl</resource-class>
+        <provider-class>org.apache.ambari.view.hive2.resources.jobs.JobResourceProvider</provider-class>
+        <service-class>org.apache.ambari.view.hive2.resources.jobs.JobService</service-class>
+    </resource>
+
+    <resource>
+        <name>upload</name>
+        <plural-name>uploads</plural-name>
+        <service-class>org.apache.ambari.view.hive2.resources.uploads.UploadService</service-class>
+    </resource>
+
+    <resource>
+        <name>file</name>
+        <service-class>org.apache.ambari.view.hive2.resources.files.FileService</service-class>
+    </resource>
+
+    <resource>
+        <name>ddl</name>
+        <service-class>org.apache.ambari.view.hive2.resources.browser.HiveBrowserService</service-class>
+    </resource>
+
+    <resource>
+        <name>hive</name>
+        <service-class>org.apache.ambari.view.hive2.HelpService</service-class>
+    </resource>
+
+    <persistence>
+        <entity>
+            <class>org.apache.ambari.view.hive2.resources.jobs.viewJobs.JobImpl</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive2.resources.savedQueries.SavedQuery</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive2.resources.udfs.UDF</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive2.resources.resources.FileResourceItem</class>
+            <id-property>id</id-property>
+        </entity>
+        <entity>
+            <class>org.apache.ambari.view.hive2.TestBean</class>
+            <id-property>id</id-property>
+        </entity>
+    </persistence>
+
+    <auto-instance>
+        <name>AUTO_HIVE_INSTANCE</name>
+        <label>Hive View</label>
+        <description>This view instance is auto created when the Hive service is added
to a cluster.</description>
+        <stack-id>HDP-2.*</stack-id>
+        <services>
+            <service>HIVE</service>
+        </services>
+    </auto-instance>
+</view>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/AsyncQueriesTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/AsyncQueriesTest.java
b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/AsyncQueriesTest.java
new file mode 100644
index 0000000..38ed2b3
--- /dev/null
+++ b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/AsyncQueriesTest.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2;
+
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import org.apache.ambari.view.hive2.actor.OperationController;
+import org.apache.ambari.view.hive2.actor.message.AsyncJob;
+import org.apache.ambari.view.hive2.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive2.actor.message.FetchResult;
+import org.apache.ambari.view.hive2.actor.message.JobSubmitted;
+import org.apache.ambari.view.hive2.actor.message.job.AsyncExecutionFailed;
+import org.apache.ambari.view.hive2.actor.message.job.Next;
+import org.apache.ambari.view.hive2.internal.ConnectionException;
+import org.apache.ambari.view.hive2.internal.Either;
+import org.apache.ambari.view.hive2.internal.HiveResult;
+import org.apache.hive.jdbc.HiveStatement;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import scala.concurrent.duration.Duration;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertTrue;
+
+public class AsyncQueriesTest extends MockSupport {
+
+
+    private static ActorSystem actorSystem;
+
+    @Before
+    public void setup() {
+        actorSystem = ActorSystem.create("TestingActorSystem");
+        Logger.getRootLogger().setLevel(Level.DEBUG);
+    }
+
+    @After
+    public void teardown() {
+        JavaTestKit.shutdownActorSystem(actorSystem);
+    }
+
+
+    /**
+     * Test asynchronous query submission through the OperationController.
+     * Submit an async job, assert that it was accepted, then fetch and
+     * iterate over the results.
+     *
+     * @throws SQLException
+     * @throws ConnectionException
+     * @throws InterruptedException
+     */
+    @Test
+    @Ignore
+    public void testAsyncQuerySubmission() throws SQLException, ConnectionException, InterruptedException
{
+        mockDependencies();
+        setUpDefaultExpectations();
+        String[] statements = {"select * from test"};
+        AsyncJob job = new AsyncJob("10", "admin", statements, "tst.log", viewContext);
+        for (String s : statements) {
+            expect(((HiveStatement) statement).executeAsync(s)).andReturn(true);
+        }
+
+        ActorRef operationControl = actorSystem.actorOf(
+                Props.create(OperationController.class, actorSystem, connectionSupplier,
supplier, hdfsSupplier), "operationController-test");
+
+        Inbox inbox = Inbox.create(actorSystem);
+
+        ExecuteJob executeJob = new ExecuteJob(connect, job);
+        inbox.send(operationControl, executeJob);
+
+        replay(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier, connectionSupplier);
+
+        try {
+
+            Object submitted = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+
+            assertTrue(submitted instanceof JobSubmitted);
+            inbox.send(operationControl,new FetchResult("10","admin"));
+
+            Either<ActorRef, AsyncExecutionFailed> receive = (Either<ActorRef, AsyncExecutionFailed>)
inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+
+            inbox.send(receive.getLeft(),new Next());
+
+            HiveResult result = (HiveResult)inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+
+            List<HiveResult.Row> rows = result.getRows();
+            System.out.println(rows);
+
+            verify(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier);
+
+
+        } catch (Throwable e) {
+            e.printStackTrace();
+        }
+
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/InactivityTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/InactivityTest.java
b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/InactivityTest.java
new file mode 100644
index 0000000..16405b2
--- /dev/null
+++ b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/InactivityTest.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2;
+
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import org.apache.ambari.view.hive2.actor.OperationController;
+import org.apache.ambari.view.hive2.actor.message.AsyncJob;
+import org.apache.ambari.view.hive2.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive2.actor.message.SyncJob;
+import org.apache.ambari.view.hive2.internal.ConnectionException;
+import org.apache.hive.jdbc.HiveStatement;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.easymock.EasyMock;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.sql.SQLException;
+
+import static org.easymock.EasyMock.*;
+
+public class InactivityTest extends MockSupport {
+
+
+    private static ActorSystem actorSystem;
+
+    @BeforeClass
+    public static void setup() {
+        actorSystem = ActorSystem.create("TestingActorSystem");
+        Logger.getRootLogger().setLevel(Level.DEBUG);
+    }
+
+    @AfterClass
+    public static void teardown() {
+        JavaTestKit.shutdownActorSystem(actorSystem);
+    }
+
+
+    /**
+     * Test the actor inactivity timer.
+     * Send the actor a message and don't care about the result.
+     *
+     * @throws SQLException
+     * @throws ConnectionException
+     * @throws InterruptedException
+     */
+    @Test
+    @Ignore
+    public void testActorInactivityTimer() throws SQLException, ConnectionException, InterruptedException
{
+         mockDependencies();
+         setUpDefaultExpectations();
+         reset(resultSet);
+         reset(resultSetMetaData);
+         statement.close();
+         resultSet.close();
+
+
+         String[] statements = {"select * from test"};
+         AsyncJob job = new AsyncJob("100","admin", statements,"tst.log" ,viewContext);
+         for (String s : statements) {
+            expect(((HiveStatement)statement).executeAsync(s)).andReturn(true);
+         }
+
+        ActorRef operationControl = actorSystem.actorOf(
+                Props.create(OperationController.class, actorSystem, connectionSupplier,
supplier, hdfsSupplier), "operationController-test");
+
+        Inbox inbox = Inbox.create(actorSystem);
+
+        ExecuteJob executeJob = new ExecuteJob(connect, job);
+        inbox.send(operationControl, executeJob);
+
+        replay(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier, connectionSupplier);
+
+        //allow inactivity timer to fire
+        Thread.sleep(62000);
+
+        verify(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier, connectionSupplier);
+
+
+
+
+
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/Mocksupport.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/Mocksupport.java
b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/Mocksupport.java
new file mode 100644
index 0000000..b7e6320
--- /dev/null
+++ b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/Mocksupport.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2;
+
+import com.google.common.base.Optional;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive2.persistence.DataStoreStorage;
+import org.apache.ambari.view.hive2.actor.message.Connect;
+import org.apache.ambari.view.hive2.internal.Connectable;
+import org.apache.ambari.view.hive2.internal.ConnectionException;
+import org.apache.ambari.view.hive2.internal.ConnectionSupplier;
+import org.apache.ambari.view.hive2.internal.DataStorageSupplier;
+import org.apache.ambari.view.hive2.internal.HdfsApiSupplier;
+import org.apache.ambari.view.utils.hdfs.HdfsApi;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.HiveStatement;
+
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+
+abstract class MockSupport {
+
+
+    protected HiveJdbcConnectionDelegate connectionDelegate;
+    protected HiveConnection connection;
+    protected Statement statement;
+    protected ResultSet resultSet;
+    protected DataStorageSupplier supplier;
+    protected HdfsApiSupplier hdfsSupplier;
+    protected ConnectionSupplier connectionSupplier;
+    protected HdfsApi hdfsApi;
+    protected ViewContext viewContext;
+    protected Connect connect;
+    protected ResultSetMetaData resultSetMetaData;
+    protected Connectable connectable;
+
+    public void setUpDefaultExpectations() throws SQLException, ConnectionException {
+        expect(supplier.get(viewContext)).andReturn(new DataStoreStorage(viewContext));
+        expect(hdfsSupplier.get(viewContext)).andReturn(Optional.fromNullable(hdfsApi)).anyTimes();
+        expect(connection.createStatement()).andReturn(statement);
+        expect(connect.getConnectable()).andReturn(connectable);
+        expect(connectable.isOpen()).andReturn(false);
+        Optional<HiveConnection> connectionOptional = Optional.of(connection);
+        expect(connectable.getConnection()).andReturn(connectionOptional).anyTimes();
+        expect(connectionSupplier.get(viewContext)).andReturn(connectionDelegate).times(1);
+        expect(statement.getResultSet()).andReturn(resultSet);
+        expect(resultSet.getMetaData()).andReturn(resultSetMetaData);
+        expect(resultSetMetaData.getColumnCount()).andReturn(1);
+        expect(resultSetMetaData.getColumnName(1)).andReturn("test");
+        expect(resultSet.next()).andReturn(true);
+        expect(resultSet.getObject(1)).andReturn("test");
+
+        connectable.connect();
+    }
+
+    public void mockDependencies() {
+        connectionDelegate = new HiveJdbcConnectionDelegate();
+        connection = createNiceMock(HiveConnection.class);
+        statement = createNiceMock(HiveStatement.class);
+        resultSet = createNiceMock(ResultSet.class);
+        supplier = createNiceMock(DataStorageSupplier.class);
+        hdfsSupplier = createNiceMock(HdfsApiSupplier.class);
+        connectionSupplier = createNiceMock(ConnectionSupplier.class);
+        hdfsApi = createNiceMock(HdfsApi.class);
+        viewContext = createNiceMock(ViewContext.class);
+        connect = createNiceMock(Connect.class);
+        resultSetMetaData = createNiceMock(ResultSetMetaData.class);
+        connectable = createNiceMock(Connectable.class);
+    }
+
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/SyncQueriesTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/SyncQueriesTest.java
b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/SyncQueriesTest.java
new file mode 100644
index 0000000..a656c4e
--- /dev/null
+++ b/contrib/views/hive-next/src/test/java/org/apache/ambari/view/hive2/SyncQueriesTest.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive2;
+
+
+import akka.actor.ActorRef;
+import akka.actor.ActorSystem;
+import akka.actor.Inbox;
+import akka.actor.Props;
+import akka.testkit.JavaTestKit;
+import org.apache.ambari.view.hive2.client.Row;
+import org.apache.ambari.view.hive2.actor.OperationController;
+import org.apache.ambari.view.hive2.actor.message.ExecuteJob;
+import org.apache.ambari.view.hive2.actor.message.SyncJob;
+import org.apache.ambari.view.hive2.actor.message.job.ExecutionFailed;
+import org.apache.ambari.view.hive2.actor.message.job.FetchFailed;
+import org.apache.ambari.view.hive2.actor.message.job.Next;
+import org.apache.ambari.view.hive2.actor.message.job.NoMoreItems;
+import org.apache.ambari.view.hive2.actor.message.job.NoResult;
+import org.apache.ambari.view.hive2.actor.message.job.Result;
+import org.apache.ambari.view.hive2.actor.message.job.ResultSetHolder;
+import org.apache.ambari.view.hive2.internal.ConnectionException;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import scala.concurrent.duration.Duration;
+
+import java.sql.SQLException;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.*;
+
+public class SyncQueriesTest extends MockSupport {
+
+
+    private  ActorSystem actorSystem;
+
+    @Before
+    public void setup() {
+        actorSystem = ActorSystem.create("TestingActorSystem");
+        Logger.getRootLogger().setLevel(Level.DEBUG);
+    }
+
+    @After
+    public void teardown() {
+        JavaTestKit.shutdownActorSystem(actorSystem);
+    }
+
+
+
+    @Test
+    @Ignore
+    public void testSyncJobSubmission() throws SQLException, ConnectionException, InterruptedException
{
+        mockDependencies();
+        setUpDefaultExpectations();
+        String[] statements = {"select * from test"};
+        SyncJob job = new SyncJob("admin", statements,viewContext);
+        for (String s : statements) {
+            expect(statement.execute(s)).andReturn(true);
+        }
+
+        ActorRef operationControl = actorSystem.actorOf(
+                Props.create(OperationController.class, actorSystem, connectionSupplier,
supplier, hdfsSupplier), "operationController-test");
+
+        Inbox inbox = Inbox.create(actorSystem);
+
+        ExecuteJob executeJob = new ExecuteJob(connect, job);
+        inbox.send(operationControl, executeJob);
+
+        replay(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier, connectionSupplier);
+
+        try {
+
+            Object jdbcResult = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+
+            if (jdbcResult instanceof NoResult) {
+                fail();
+            } else if (jdbcResult instanceof ExecutionFailed) {
+
+                ExecutionFailed error = (ExecutionFailed) jdbcResult;
+                fail();
+                error.getError().printStackTrace();
+
+            } else if (jdbcResult instanceof ResultSetHolder) {
+                ResultSetHolder holder = (ResultSetHolder) jdbcResult;
+                ActorRef iterator = holder.getIterator();
+
+                inbox.send(iterator, new Next());
+                Object receive = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+
+
+                Result result = (Result) receive;
+                List<Row> rows = result.getRows();
+                System.out.println("Fetched " + rows.size() + " entries.");
+                for (Row row : rows) {
+                    assertArrayEquals(row.getRow(), new String[]{"test"});
+                }
+
+                inbox.send(iterator, new Next());
+                receive = inbox.receive(Duration.create(1, TimeUnit.MINUTES));
+                assertTrue(receive instanceof NoMoreItems);
+
+
+                if (receive instanceof FetchFailed) {
+                    fail();
+                }
+
+            }
+
+        } catch (Throwable ex) {
+            fail();
+        }
+
+
+        verify(connection, resultSet, resultSetMetaData, statement, viewContext, connect,
connectable, hdfsSupplier, hdfsApi, supplier);
+
+    }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index 6f3ecf9..0114013 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -45,6 +45,7 @@
     <module>storm</module>
     <module>zeppelin</module>
     <module>hueambarimigration</module>
+    <module>hive-next</module>
   </modules>
   <build>
     <pluginManagement>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f3df0252/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 31fbec5..13c165b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -315,6 +315,8 @@
             <exclude>contrib/views/commons/src/main/resources/ui/*/tests/**/vendor/**</exclude>
             <exclude>contrib/views/hive/src/main/resources/ui/hive-web/vendor/codemirror/**</exclude>
             <exclude>contrib/views/hive/src/main/resources/ui/hive-web/.bowerrc</exclude>
+            <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/vendor/codemirror/**</exclude>
+            <exclude>contrib/views/hive-next/src/main/resources/ui/hive-web/.bowerrc</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/.bowerrc</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/bower_components/**</exclude>
             <exclude>contrib/views/files/src/main/resources/ui/node/**</exclude>


Mime
View raw message