ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From alexantone...@apache.org
Subject [12/12] ambari git commit: AMBARI-9676. Initial Hive View Submission to Contrib (alexantonenko)
Date Tue, 24 Feb 2015 16:32:05 GMT
AMBARI-9676. Initial Hive View Submission to Contrib (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f5482d89
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f5482d89
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f5482d89

Branch: refs/heads/branch-2.0.0
Commit: f5482d89da0481f642dda58e7dae4e0fe76d64bc
Parents: 828d85d
Author: Alex Antonenko <hiveww@gmail.com>
Authored: Tue Feb 24 18:28:59 2015 +0200
Committer: Alex Antonenko <hiveww@gmail.com>
Committed: Tue Feb 24 18:31:21 2015 +0200

----------------------------------------------------------------------
 contrib/views/hive/pom.xml                      | 306 ++++++++++++
 .../apache/ambari/view/hive/BaseService.java    |  58 +++
 .../apache/ambari/view/hive/HelpService.java    |  72 +++
 .../org/apache/ambari/view/hive/TestBean.java   |  36 ++
 .../backgroundjobs/BackgroundJobController.java |  80 ++++
 .../view/hive/client/ColumnDescription.java     |  30 ++
 .../hive/client/ColumnDescriptionExtended.java  | 103 ++++
 .../hive/client/ColumnDescriptionShort.java     |  72 +++
 .../ambari/view/hive/client/Connection.java     | 401 ++++++++++++++++
 .../ambari/view/hive/client/ConnectionPool.java |  81 ++++
 .../apache/ambari/view/hive/client/Cursor.java  | 233 +++++++++
 .../ambari/view/hive/client/DDLDelegator.java   | 140 ++++++
 .../ambari/view/hive/client/HiveCall.java       |  68 +++
 .../view/hive/client/HiveClientException.java   |  25 +
 .../hive/client/HiveClientRuntimeException.java |  25 +
 .../hive/client/HiveErrorStatusException.java   |  30 ++
 .../ambari/view/hive/client/LogsCursor.java     |  36 ++
 .../org/apache/ambari/view/hive/client/Row.java |  50 ++
 .../apache/ambari/view/hive/client/Utils.java   |  72 +++
 .../view/hive/client/ViewSessionState.java      |  32 ++
 .../view/hive/persistence/DataStoreStorage.java | 186 ++++++++
 .../persistence/InstanceKeyValueStorage.java    | 140 ++++++
 .../view/hive/persistence/KeyValueStorage.java  | 162 +++++++
 .../hive/persistence/LocalKeyValueStorage.java  |  78 +++
 .../persistence/PersistentConfiguration.java    |  52 ++
 .../ambari/view/hive/persistence/Storage.java   |  86 ++++
 .../utils/ContextConfigurationAdapter.java      | 260 ++++++++++
 .../persistence/utils/FilteringStrategy.java    |  31 ++
 .../view/hive/persistence/utils/Indexed.java    |  36 ++
 .../hive/persistence/utils/ItemNotFound.java    |  25 +
 .../utils/OnlyOwnersFilteringStrategy.java      |  33 ++
 .../view/hive/persistence/utils/Owned.java      |  36 ++
 .../persistence/utils/PersonalResource.java     |  22 +
 .../hive/persistence/utils/StorageUtil.java     |  94 ++++
 .../hive/resources/CRUDResourceManager.java     | 133 ++++++
 .../resources/PersonalCRUDResourceManager.java  | 109 +++++
 .../resources/SharedCRUDResourceManager.java    |  50 ++
 .../resources/browser/HiveBrowserService.java   | 263 ++++++++++
 .../view/hive/resources/files/FileResource.java |  70 +++
 .../view/hive/resources/files/FileService.java  | 185 ++++++++
 .../resources/jobs/ConnectionController.java    |  62 +++
 .../hive/resources/jobs/FileResourceShort.java  |  55 +++
 .../ambari/view/hive/resources/jobs/Job.java    |  90 ++++
 .../view/hive/resources/jobs/JobController.java |  46 ++
 .../resources/jobs/JobControllerFactory.java    |  43 ++
 .../hive/resources/jobs/JobControllerImpl.java  | 326 +++++++++++++
 .../view/hive/resources/jobs/JobImpl.java       | 203 ++++++++
 .../hive/resources/jobs/JobResourceManager.java | 107 +++++
 .../resources/jobs/JobResourceProvider.java     | 108 +++++
 .../view/hive/resources/jobs/JobService.java    | 355 ++++++++++++++
 .../view/hive/resources/jobs/LogParser.java     | 113 +++++
 .../jobs/ModifyNotificationDelegate.java        |  23 +
 .../ModifyNotificationInvocationHandler.java    |  40 ++
 .../jobs/NoOperationStatusSetException.java     |  26 +
 .../jobs/OperationHandleController.java         | 113 +++++
 .../jobs/OperationHandleControllerFactory.java  |  52 ++
 .../jobs/OperationHandleResourceManager.java    |  85 ++++
 .../jobs/ResultsPaginationController.java       | 180 +++++++
 .../resources/jobs/StoredOperationHandle.java   | 146 ++++++
 .../resources/resources/FileResourceItem.java   |  78 +++
 .../resources/FileResourceResourceManager.java  |  64 +++
 .../resources/FileResourceResourceProvider.java | 107 +++++
 .../resources/FileResourceService.java          | 181 +++++++
 .../hive/resources/savedQueries/SavedQuery.java |  96 ++++
 .../savedQueries/SavedQueryResourceManager.java | 132 ++++++
 .../SavedQueryResourceProvider.java             | 104 ++++
 .../savedQueries/SavedQueryService.java         | 184 +++++++
 .../ambari/view/hive/resources/udfs/UDF.java    |  87 ++++
 .../hive/resources/udfs/UDFResourceManager.java |  64 +++
 .../resources/udfs/UDFResourceProvider.java     | 107 +++++
 .../view/hive/resources/udfs/UDFService.java    | 196 ++++++++
 .../utils/BadRequestFormattedException.java     |  27 ++
 .../ambari/view/hive/utils/FilePaginator.java   | 127 +++++
 .../apache/ambari/view/hive/utils/HdfsApi.java  | 399 ++++++++++++++++
 .../apache/ambari/view/hive/utils/HdfsUtil.java |  87 ++++
 .../MisconfigurationFormattedException.java     |  47 ++
 .../hive/utils/NotFoundFormattedException.java  |  27 ++
 .../hive/utils/ServiceFormattedException.java   |  78 +++
 .../src/main/resources/ui/hive-web/.gitignore   |  35 ++
 .../src/main/resources/ui/hive-web/.jshintrc    |  32 ++
 .../src/main/resources/ui/hive-web/Brocfile.js  |  46 ++
 .../src/main/resources/ui/hive-web/README.md    |  14 +
 .../ui/hive-web/app/adapters/application.js     |  44 ++
 .../ui/hive-web/app/adapters/database.js        |  25 +
 .../resources/ui/hive-web/app/adapters/file.js  |  26 +
 .../src/main/resources/ui/hive-web/app/app.js   |  34 ++
 .../ui/hive-web/app/components/.gitkeep         |   0
 .../app/components/alert-message-widget.js      |  35 ++
 .../app/components/collapsible-widget.js        |  33 ++
 .../app/components/column-filter-widget.js      |  56 +++
 .../app/components/date-range-widget.js         |  62 +++
 .../hive-web/app/components/expander-widget.js  |  30 ++
 .../hive-web/app/components/extended-input.js   |  50 ++
 .../ui/hive-web/app/components/modal-widget.js  |  34 ++
 .../ui/hive-web/app/components/no-bubbling.js   |  31 ++
 .../app/components/number-range-widget.js       |  52 ++
 .../ui/hive-web/app/components/panel-widget.js  |  30 ++
 .../hive-web/app/components/popover-widget.js   |  33 ++
 .../hive-web/app/components/progress-widget.js  |  45 ++
 .../ui/hive-web/app/components/query-editor.js  | 121 +++++
 .../ui/hive-web/app/components/select-widget.js |  66 +++
 .../ui/hive-web/app/components/tabs-widget.js   |  64 +++
 .../ui/hive-web/app/components/tree-view.js     |  23 +
 .../hive-web/app/components/typeahead-widget.js |  44 ++
 .../ui/hive-web/app/controllers/.gitkeep        |   0
 .../ui/hive-web/app/controllers/alerts.js       |  47 ++
 .../ui/hive-web/app/controllers/columns.js      | 104 ++++
 .../ui/hive-web/app/controllers/databases.js    | 354 ++++++++++++++
 .../hive-web/app/controllers/file-resources.js  |  22 +
 .../ui/hive-web/app/controllers/history.js      | 148 ++++++
 .../ui/hive-web/app/controllers/index.js        | 475 +++++++++++++++++++
 .../controllers/index/history-query/explain.js  | 119 +++++
 .../app/controllers/index/history-query/logs.js |  95 ++++
 .../controllers/index/history-query/results.js  | 134 ++++++
 .../ui/hive-web/app/controllers/insert-udfs.js  |  58 +++
 .../ui/hive-web/app/controllers/job.js          |  49 ++
 .../ui/hive-web/app/controllers/loaded-files.js |  65 +++
 .../ui/hive-web/app/controllers/modal-delete.js |  33 ++
 .../ui/hive-web/app/controllers/modal-save.js   |  33 ++
 .../ui/hive-web/app/controllers/open-queries.js | 320 +++++++++++++
 .../ui/hive-web/app/controllers/queries.js      | 107 +++++
 .../ui/hive-web/app/controllers/settings.js     | 150 ++++++
 .../ui/hive-web/app/controllers/tables.js       |  93 ++++
 .../ui/hive-web/app/controllers/udf.js          | 138 ++++++
 .../ui/hive-web/app/controllers/udfs.js         |  69 +++
 .../resources/ui/hive-web/app/helpers/.gitkeep  |   0
 .../ui/hive-web/app/helpers/code-helper.js      |  28 ++
 .../ui/hive-web/app/helpers/date-binding.js     |  27 ++
 .../ui/hive-web/app/helpers/log-helper.js       |  28 ++
 .../ui/hive-web/app/helpers/path-binding.js     |  29 ++
 .../ui/hive-web/app/helpers/tb-helper.js        |  33 ++
 .../main/resources/ui/hive-web/app/index.html   |  38 ++
 .../ui/hive-web/app/initializers/i18n.js        | 160 +++++++
 .../ui/hive-web/app/mixins/filterable.js        | 104 ++++
 .../ui/hive-web/app/mixins/sortable.js          |  31 ++
 .../resources/ui/hive-web/app/models/.gitkeep   |   0
 .../ui/hive-web/app/models/database.js          |  25 +
 .../ui/hive-web/app/models/file-resource.js     |  25 +
 .../resources/ui/hive-web/app/models/file.js    |  26 +
 .../resources/ui/hive-web/app/models/job.js     |  32 ++
 .../ui/hive-web/app/models/saved-query.js       |  29 ++
 .../resources/ui/hive-web/app/models/udf.js     |  27 ++
 .../main/resources/ui/hive-web/app/router.js    |  47 ++
 .../resources/ui/hive-web/app/routes/.gitkeep   |   0
 .../ui/hive-web/app/routes/application.js       |  62 +++
 .../resources/ui/hive-web/app/routes/history.js |  30 ++
 .../app/routes/index/history-query/explain.js   |  28 ++
 .../app/routes/index/history-query/index.js     |  37 ++
 .../app/routes/index/history-query/logs.js      |  28 ++
 .../app/routes/index/history-query/results.js   |  28 ++
 .../ui/hive-web/app/routes/index/index.js       |  36 ++
 .../ui/hive-web/app/routes/index/saved-query.js |  33 ++
 .../resources/ui/hive-web/app/routes/loading.js |  22 +
 .../resources/ui/hive-web/app/routes/queries.js |  30 ++
 .../resources/ui/hive-web/app/routes/udfs.js    |  30 ++
 .../ui/hive-web/app/serializers/database.js     |  41 ++
 .../ui/hive-web/app/serializers/file.js         |  23 +
 .../resources/ui/hive-web/app/styles/.gitkeep   |   0
 .../resources/ui/hive-web/app/styles/app.scss   | 327 +++++++++++++
 .../hive-web/app/styles/dropdown-submenu.scss   |  66 +++
 .../ui/hive-web/app/templates/.gitkeep          |   0
 .../ui/hive-web/app/templates/alerts.hbs        |  23 +
 .../ui/hive-web/app/templates/application.hbs   |  25 +
 .../hive-web/app/templates/components/.gitkeep  |   0
 .../components/alert-message-widget.hbs         |  28 ++
 .../templates/components/collapsible-widget.hbs |  24 +
 .../components/column-filter-widget.hbs         |  42 ++
 .../templates/components/date-range-widget.hbs  |  22 +
 .../templates/components/expander-widget.hbs    |  31 ++
 .../app/templates/components/modal-widget.hbs   |  35 ++
 .../app/templates/components/no-bubbling.hbs    |  19 +
 .../components/number-range-widget.hbs          |  23 +
 .../app/templates/components/panel-widget.hbs   |  46 ++
 .../app/templates/components/popover-widget.hbs |  19 +
 .../templates/components/progress-widget.hbs    |  25 +
 .../app/templates/components/query-editor.hbs   |  19 +
 .../app/templates/components/select-widget.hbs  |  39 ++
 .../app/templates/components/tabs-widget.hbs    |  40 ++
 .../app/templates/components/tree-view.hbs      |  28 ++
 .../templates/components/typeahead-widget.hbs   |  17 +
 .../app/templates/databases-search-results.hbs  |  48 ++
 .../hive-web/app/templates/databases-tree.hbs   |  48 ++
 .../ui/hive-web/app/templates/databases.hbs     |  54 +++
 .../ui/hive-web/app/templates/history.hbs       |  70 +++
 .../ui/hive-web/app/templates/index.hbs         |  82 ++++
 .../templates/index/history-query/explain.hbs   |  23 +
 .../app/templates/index/history-query/logs.hbs  |  19 +
 .../templates/index/history-query/results.hbs   |  47 ++
 .../ui/hive-web/app/templates/insert-udfs.hbs   |  44 ++
 .../ui/hive-web/app/templates/loading.hbs       |  19 +
 .../ui/hive-web/app/templates/logs.hbs          |  19 +
 .../ui/hive-web/app/templates/modal-delete.hbs  |  21 +
 .../ui/hive-web/app/templates/modal-save.hbs    |  21 +
 .../ui/hive-web/app/templates/navbar.hbs        |  45 ++
 .../ui/hive-web/app/templates/open-queries.hbs  |  23 +
 .../ui/hive-web/app/templates/queries.hbs       |  78 +++
 .../ui/hive-web/app/templates/redirect.hbs      |  19 +
 .../ui/hive-web/app/templates/settings.hbs      |  52 ++
 .../ui/hive-web/app/templates/udfs.hbs          | 112 +++++
 .../ui/hive-web/app/transforms/date.js          |  49 ++
 .../ui/hive-web/app/utils/constants.js          | 122 +++++
 .../ui/hive-web/app/utils/functions.js          |  36 ++
 .../resources/ui/hive-web/app/views/.gitkeep    |   0
 .../resources/ui/hive-web/app/views/history.js  |  25 +
 .../ui/hive-web/app/views/insert-udfs.js        |  23 +
 .../resources/ui/hive-web/app/views/navbar.js   |  38 ++
 .../main/resources/ui/hive-web/big_tables.js    |  54 +++
 .../src/main/resources/ui/hive-web/bower.json   |  27 ++
 .../resources/ui/hive-web/config/environment.js |  65 +++
 .../src/main/resources/ui/hive-web/package.json |  45 ++
 .../src/main/resources/ui/hive-web/testem.json  |  11 +
 .../main/resources/ui/hive-web/tests/.jshintrc  |  74 +++
 .../ui/hive-web/tests/blanket-options.js        |  25 +
 .../ui/hive-web/tests/helpers/api-mock.js       | 242 ++++++++++
 .../ui/hive-web/tests/helpers/resolver.js       |  29 ++
 .../ui/hive-web/tests/helpers/start-app.js      |  43 ++
 .../resources/ui/hive-web/tests/img/spinner.gif | Bin 0 -> 3289 bytes
 .../main/resources/ui/hive-web/tests/index.html |  64 +++
 .../hive-web/tests/integration/database-test.js | 105 ++++
 .../hive-web/tests/integration/history-test.js  |  95 ++++
 .../tests/integration/query-editor-test.js      | 108 +++++
 .../tests/integration/saved-queries-test.js     | 126 +++++
 .../ui/hive-web/tests/integration/udfs-test.js  |  91 ++++
 .../resources/ui/hive-web/tests/test-helper.js  |  30 ++
 .../resources/ui/hive-web/tests/unit/.gitkeep   |   0
 .../hive-web/tests/unit/adapters/application.js |  48 ++
 .../ui/hive-web/tests/unit/adapters/file.js     |  39 ++
 .../components/alert-message-widget-test.js     |  91 ++++
 .../unit/components/collapsible-widget-test.js  |  44 ++
 .../components/column-filter-widget-test.js     | 138 ++++++
 .../unit/components/date-range-widget-test.js   | 130 +++++
 .../unit/components/expander-widget-test.js     |  58 +++
 .../unit/components/extended-input-test.js      |  79 +++
 .../tests/unit/components/no-bubbling-test.js   |  43 ++
 .../unit/components/number-range-widget-test.js |  72 +++
 .../unit/components/popover-widget-test.js      |  34 ++
 .../unit/components/progress-widget-test.js     |  45 ++
 .../tests/unit/components/query-editor-test.js  |  50 ++
 .../tests/unit/components/select-widget-test.js | 158 ++++++
 .../tests/unit/components/tabs-wiget-test.js    | 117 +++++
 .../unit/components/typeahead-widget-test.js    |  45 ++
 .../hive-web/tests/unit/controllers/columns.js  |  32 ++
 .../tests/unit/controllers/databases-test.js    | 214 +++++++++
 .../tests/unit/controllers/history-test.js      | 117 +++++
 .../tests/unit/controllers/index-test.js        | 179 +++++++
 .../tests/unit/controllers/insert-udfs-test.js  |  68 +++
 .../hive-web/tests/unit/controllers/job-test.js |  62 +++
 .../tests/unit/controllers/open-queries-test.js | 103 ++++
 .../tests/unit/controllers/queries-test.js      |  47 ++
 .../hive-web/tests/unit/controllers/tables.js   |  32 ++
 .../hive-web/tests/unit/controllers/udf-test.js |  94 ++++
 .../tests/unit/controllers/udfs-test.js         |  62 +++
 .../tests/unit/helpers/path-binding-test.js     |  35 ++
 .../main/resources/ui/hive-web/vendor/.gitkeep  |   0
 .../vendor/codemirror/codemirror-min.js         |  17 +
 .../hive-web/vendor/codemirror/codemirror.css   | 309 ++++++++++++
 .../ui/hive-web/vendor/codemirror/show-hint.css |  38 ++
 .../ui/hive-web/vendor/codemirror/show-hint.js  | 389 +++++++++++++++
 .../ui/hive-web/vendor/codemirror/sql-hint.js   | 192 ++++++++
 contrib/views/hive/src/main/resources/view.xml  | 160 +++++++
 .../apache/ambari/view/hive/BaseHiveTest.java   | 103 ++++
 .../org/apache/ambari/view/hive/HDFSTest.java   |  64 +++
 .../ambari/view/hive/ServiceTestUtils.java      |  63 +++
 .../BackgroundJobControllerTest.java            |  77 +++
 .../hive/resources/files/FileServiceTest.java   | 214 +++++++++
 .../hive/resources/jobs/JobServiceTest.java     | 210 ++++++++
 .../view/hive/resources/jobs/LogParserTest.java |  73 +++
 .../resources/FileResourceServiceTest.java      | 119 +++++
 .../savedQueries/SavedQueryServiceTest.java     | 177 +++++++
 .../hive/resources/udfs/UDFServiceTest.java     | 119 +++++
 .../ambari/view/hive/utils/HdfsApiMock.java     |  84 ++++
 .../utils/SeekableByteArrayInputStream.java     |  71 +++
 contrib/views/pom.xml                           |   1 +
 pom.xml                                         |   1 +
 274 files changed, 21147 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
new file mode 100644
index 0000000..0ed6af4
--- /dev/null
+++ b/contrib/views/hive/pom.xml
@@ -0,0 +1,306 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>hive</artifactId>
+  <version>0.1.0-SNAPSHOT</version>
+  <name>Hive</name>
+
+  <parent>
+    <groupId>org.apache.ambari.contrib.views</groupId>
+    <artifactId>ambari-contrib-views</artifactId>
+    <version>2.0.0-SNAPSHOT</version>
+  </parent>
+
+  <!-- NOTE(review): the Jersey artifacts below mix versions (jersey-client 1.8,
+       jersey-json 1.9, jersey-multipart 1.18, jersey-core 1.18.1). Consider
+       aligning them on a single version property to avoid classpath conflicts. -->
+  <dependencies>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
+      <version>1.18</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>1.8</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <version>1.18.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <version>1.9</version>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-configuration</groupId>
+      <artifactId>commons-configuration</artifactId>
+      <version>1.6</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-csv</artifactId>
+      <version>1.0</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-collections4</artifactId>
+      <version>4.0</version>
+    </dependency>
+    <!-- Ambari views framework API; provided at runtime by the Ambari server. -->
+    <dependency>
+      <groupId>org.apache.ambari</groupId>
+      <artifactId>ambari-views</artifactId>
+      <scope>provided</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.servlet</groupId>
+      <artifactId>javax.servlet-api</artifactId>
+      <version>3.0.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+      <version>1.7.5</version>
+    </dependency>
+    <!-- Hadoop client stack, all pinned to the hadoop-version property below. -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-core</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-client</artifactId>
+      <version>${hadoop-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>javax.ws.rs</groupId>
+      <artifactId>javax.ws.rs-api</artifactId>
+      <version>2.0</version>
+    </dependency>
+    <!-- Hive client stack, all pinned to the hive-version property below. -->
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-service</artifactId>
+      <version>${hive-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-common</artifactId>
+      <version>${hive-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+      <version>${hive-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+      <version>${hive-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-exec</artifactId>
+      <version>${hive-version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+      <version>1.2</version>
+    </dependency>
+    <!-- NOTE(review): commons-lang 2.2 is very old (2.6 was the final 2.x
+         release) — verify this downlevel version is intentional. -->
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+      <version>2.2</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <version>0.9.0</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.easymock</groupId>
+      <artifactId>easymock</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
+      <version>${hadoop-version}</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <properties>
+    <hadoop-version>2.6.0</hadoop-version>
+    <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
+    <hive-version>1.0.0</hive-version>
+    <!-- NOTE(review): ambari.version is not referenced elsewhere in this POM —
+         confirm it is consumed by the parent build, otherwise remove it. -->
+    <ambari.version>1.3.0-SNAPSHOT</ambari.version>
+  </properties>
+  <build>
+    <plugins>
+
+      <!-- Building frontend -->
+      <!-- Installs a project-local node/npm and runs `npm install` for the
+           Ember app under src/main/resources/ui/hive-web. -->
+      <plugin>
+        <groupId>com.github.eirslett</groupId>
+        <artifactId>frontend-maven-plugin</artifactId>
+        <version>0.0.14</version>
+        <configuration>
+          <nodeVersion>v0.10.26</nodeVersion>
+          <npmVersion>1.4.3</npmVersion>
+          <workingDirectory>src/main/resources/ui/hive-web/</workingDirectory>
+        </configuration>
+        <executions>
+          <execution>
+            <id>install node and npm</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>install-node-and-npm</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>npm install</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>npm</goal>
+            </goals>
+            <configuration>
+              <arguments>install --python="${project.basedir}/../src/main/unix/ambari-python-wrap" --unsafe-perm</arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <!-- Runs `ember build` with the node installed by the plugin above;
+           the resulting dist/ directory is packaged via <resources> below. -->
+      <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <version>1.3.2</version>
+        <executions>
+          <execution>
+            <id>Hive build</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <workingDirectory>${basedir}/src/main/resources/ui/hive-web</workingDirectory>
+              <executable>node/node</executable>
+              <arguments>
+                <argument>node_modules/.bin/ember</argument>
+                <argument>build</argument>
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.1</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+      <!-- Copies runtime dependencies to target/lib so they can be packaged
+           into WEB-INF/lib (see the <resources> section). -->
+      <plugin>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/lib</outputDirectory>
+              <includeScope>runtime</includeScope>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <!-- jdeb is bound to phase "none", i.e. effectively disabled here. -->
+      <plugin>
+       <groupId>org.vafer</groupId>
+       <artifactId>jdeb</artifactId>
+       <version>1.0.1</version>
+       <executions>
+           <execution>
+               <phase>none</phase>
+               <goals>
+                   <goal>jdeb</goal>
+               </goals>
+           </execution>
+       </executions>
+       <configuration>
+           <submodules>false</submodules>
+       </configuration>
+     </plugin>
+    </plugins>
+    <resources>
+      <resource>
+        <directory>src/main/resources</directory>
+        <filtering>false</filtering>
+        <includes>
+          <include>META-INF/**/*</include>
+          <include>view.xml</include>
+        </includes>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/hive-web/dist</directory>
+        <filtering>false</filtering>
+      </resource>
+      <resource>
+        <targetPath>WEB-INF/lib</targetPath>
+        <filtering>false</filtering>
+        <directory>target/lib</directory>
+      </resource>
+    </resources>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
new file mode 100644
index 0000000..e28193d
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/BaseService.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.persistence.Storage;
+import org.apache.ambari.view.hive.persistence.utils.StorageUtil;
+import org.apache.ambari.view.hive.utils.HdfsApi;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+/**
+ * Parent service for the Hive view's REST endpoints.
+ *
+ * <p>Provides subclasses with the injected {@link ViewContext} plus lazily
+ * created handles to the view's persistent {@link Storage} and the
+ * {@link HdfsApi} client.
+ */
+public class BaseService {
+  // View context injected by the Ambari views framework; carries instance
+  // configuration and the current user.
+  @Inject
+  protected ViewContext context;
+
+  protected final static Logger LOG =
+      LoggerFactory.getLogger(BaseService.class);
+
+  // Lazily created on first use via getStorage().
+  // NOTE(review): the null-check init below is not synchronized — assumes a
+  // single service instance is not hit concurrently; confirm the framework
+  // guarantees this.
+  private Storage storage;
+  /** Returns the persistent storage for this view instance, creating it on first use. */
+  protected Storage getStorage() {
+    if (storage == null) {
+      storage = StorageUtil.getInstance(context).getStorage();
+    }
+    return storage;
+  }
+
+  // Lazily created on first use via getHdfsApi(); same unsynchronized
+  // lazy-init caveat as storage above.
+  private HdfsApi hdfsApi = null;
+  /** Returns the HDFS client for this view instance, creating it on first use. */
+  protected HdfsApi getHdfsApi()  {
+    if (hdfsApi == null)
+      hdfsApi = HdfsApi.getInstance(context);
+    return hdfsApi;
+  }
+
+  public BaseService() {
+    // NOTE(review): leftover disabled classloader hack — delete if it is
+    // truly dead, or document why it must stay.
+//    Thread.currentThread().setContextClassLoader(null);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
new file mode 100644
index 0000000..f975393
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/HelpService.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import com.google.inject.Inject;
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.ViewResourceHandler;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+/**
+ * Help service
+ */
+public class HelpService extends BaseService {
+  @Inject
+  ViewContext context;
+
+  @Inject
+  protected ViewResourceHandler handler;
+
+  /**
+   * Constructor
+   */
+  public HelpService() {
+    super();
+  }
+
+  /**
+   * Version
+   * @return version
+   */
+  @GET
+  @Path("/version")
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response version(){
+    return Response.ok("0.0.1-SNAPSHOT").build();
+  }
+
+  /**
+   * Version
+   * @return version
+   */
+  @GET
+  @Path("/test")
+  @Produces(MediaType.TEXT_PLAIN)
+  public Response testStorage(){
+    TestBean test = new TestBean();
+    test.someData = "hello world";
+    getStorage().store(TestBean.class, test);
+    return Response.ok("OK").build();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
new file mode 100644
index 0000000..fce0177
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/TestBean.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive;
+
+import org.apache.ambari.view.hive.persistence.utils.Indexed;
+
+public class TestBean implements Indexed {
+  public String someData;
+  public Integer id;
+
+  @Override
+  public Integer getId() {
+    return id;
+  }
+
+  @Override
+  public void setId(Integer id) {
+    this.id = id;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
new file mode 100644
index 0000000..6bc284e
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/backgroundjobs/BackgroundJobController.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.backgroundjobs;
+
+import org.apache.ambari.view.ViewContext;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class BackgroundJobController {
+  private ViewContext context;
+
+  protected BackgroundJobController(ViewContext context) {
+    this.context = context;
+  }
+
+  private static Map<String, BackgroundJobController> viewSingletonObjects = new HashMap<String, BackgroundJobController>();
+  public static BackgroundJobController getInstance(ViewContext context) {
+    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
+      viewSingletonObjects.put(context.getInstanceName(), new BackgroundJobController(context));
+    return viewSingletonObjects.get(context.getInstanceName());
+  }
+
+  private Map<String, Thread> jobs = new HashMap<String, Thread>();
+  public void startJob(String key, Runnable runnable) {
+    if (jobs.containsKey(key)) {
+      interrupt(key);
+      try {
+        jobs.get(key).join();
+      } catch (InterruptedException ignored) {
+      }
+    }
+    Thread t = new Thread(runnable);
+    jobs.put(key, t);
+    t.start();
+  }
+
+  public Thread.State state(String key) {
+    if (!jobs.containsKey(key))
+      return Thread.State.TERMINATED;
+
+    Thread.State state = jobs.get(key).getState();
+
+    if (state == Thread.State.TERMINATED)
+      jobs.remove(key);
+
+    return state;
+  }
+
+  public boolean interrupt(String key) {
+    if (!jobs.containsKey(key))
+      return false;
+
+    jobs.get(key).interrupt();
+    return true;
+  }
+
+  public boolean isInterrupted(String key) {
+    if (state(key) == Thread.State.TERMINATED)
+      return true;
+
+    return jobs.get(key).isInterrupted();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescription.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescription.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescription.java
new file mode 100644
index 0000000..d7ea560
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescription.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
/**
 * Describes one column of a table or result set: its name, Hive type,
 * and zero-based position.
 */
public interface ColumnDescription {
  // Interface members are implicitly public and abstract; the redundant
  // modifiers have been dropped.
  String getName();
  void setName(String name);

  String getType();
  void setType(String type);

  int getPosition();
  void setPosition(int position);
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionExtended.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionExtended.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionExtended.java
new file mode 100644
index 0000000..ab54e61
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionExtended.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+public class ColumnDescriptionExtended implements ColumnDescription {
+  private String name;
+  private String type;
+  private int position;
+  private String comment;
+  private boolean partitioned;
+  private boolean sortedBy;
+  private boolean clusteredBy;
+
+  private ColumnDescriptionExtended(String name, String type, String comment, boolean partitioned,
+                                   boolean sortedBy, boolean clusteredBy, int position) {
+    setName(name);
+    setType(type);
+    setPosition(position);
+    setComment(comment);
+    setPartitioned(partitioned);
+    setSortedBy(sortedBy);
+    setClusteredBy(clusteredBy);
+  }
+
+  public static ColumnDescription createExtendedColumnDescription(String name, String type, String comment,
+                                                                  boolean partitioned, boolean sortedBy, boolean clusteredBy,
+                                                                  int position) {
+    return new ColumnDescriptionExtended(name, type, comment, partitioned, sortedBy, clusteredBy, position);
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public int getPosition() {
+    return position;
+  }
+
+  public void setPosition(int position) {
+    this.position = position;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  public boolean isPartitioned() {
+    return partitioned;
+  }
+
+  public void setPartitioned(boolean partitioned) {
+    this.partitioned = partitioned;
+  }
+
+  public boolean isSortedBy() {
+    return sortedBy;
+  }
+
+  public void setSortedBy(boolean sortedBy) {
+    this.sortedBy = sortedBy;
+  }
+
+  public boolean isClusteredBy() {
+    return clusteredBy;
+  }
+
+  public void setClusteredBy(boolean clusteredBy) {
+    this.clusteredBy = clusteredBy;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionShort.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionShort.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionShort.java
new file mode 100644
index 0000000..a6500aa
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ColumnDescriptionShort.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import java.util.ArrayList;
+
+public class ColumnDescriptionShort extends ArrayList<Object> implements ColumnDescription {
+  public static final int INITIAL_CAPACITY = 3;
+  public static final int NAME_INDEX = 0;
+  public static final int TYPE_INDEX = 1;
+  public static final int POSITION_INDEX = 2;
+
+  private ColumnDescriptionShort(String name, String type, int position) {
+    super(INITIAL_CAPACITY);
+    this.add(null);
+    this.add(null);
+    this.add(null);
+    setName(name);
+    setType(type);
+    setPosition(position);
+  }
+
+  public static ColumnDescription createShortColumnDescription(String name, String type, int position) {
+    return new ColumnDescriptionShort(name, type, position);
+  }
+
+  @Override
+  public String getName() {
+    return (String) this.get(NAME_INDEX);
+  }
+
+  @Override
+  public void setName(String name) {
+    this.set(NAME_INDEX, name);
+  }
+
+  @Override
+  public String getType() {
+    return (String) this.get(TYPE_INDEX);
+  }
+
+  @Override
+  public void setType(String type) {
+    this.set(TYPE_INDEX, type);
+  }
+
+  @Override
+  public int getPosition() {
+    return (Integer) this.get(POSITION_INDEX);
+  }
+
+  @Override
+  public void setPosition(int position) {
+    this.set(POSITION_INDEX, position);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
new file mode 100644
index 0000000..e713aba
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
@@ -0,0 +1,401 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.hadoop.hive.shims.ShimLoader;
+import org.apache.hive.service.auth.HiveAuthFactory;
+import org.apache.hive.service.auth.KerberosSaslHelper;
+import org.apache.hive.service.auth.PlainSaslHelper;
+import org.apache.hive.service.auth.SaslQOP;
+import org.apache.hive.service.cli.thrift.*;
+import org.apache.thrift.TException;
+import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.transport.TTransport;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.security.sasl.Sasl;
+import javax.security.sasl.SaslException;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
/**
 * Holds a Thrift transport and session to a HiveServer2 instance.
 * The transport and a Hive session are opened at construction time;
 * statements are submitted through {@link HiveCall} wrappers around the
 * generated Thrift client.
 */
public class Connection {
  private final static Logger LOG =
      LoggerFactory.getLogger(Connection.class);
  private String host;
  private int port;
  // Authentication settings; keys are defined in Utils.HiveAuthenticationParams
  // (auth type, principal, QOP, SSL trust store, user/password, ...).
  private Map<String, String> authParams;

  private TCLIService.Client client = null;    // Thrift RPC client; null when closed
  private TSessionHandle sessHandle = null;    // current Hive session; null when closed
  private TProtocolVersion protocol = null;    // protocol version negotiated on open
  private TTransport transport;

  private DDLDelegator ddl;

  /**
   * Opens the transport and a Hive session immediately.
   * @param host HiveServer2 host
   * @param port HiveServer2 Thrift port
   * @param authParams authentication settings (see Utils.HiveAuthenticationParams)
   * @throws HiveClientException if the connection or session cannot be opened
   */
  public Connection(String host, int port, Map<String, String> authParams) throws HiveClientException {
    this.host = host;
    this.port = port;
    this.authParams = authParams;

    openConnection();
    ddl = new DDLDelegator(this);
  }

  /** @return the DDL helper bound to this connection */
  public DDLDelegator ddl() {
    return ddl;
  }

  /**
   * Opens the Thrift transport, builds the RPC client, then opens a session.
   * @throws HiveClientException if the transport cannot be opened
   */
  public synchronized void openConnection() throws HiveClientException {
    try {
      transport = getTransport();
      transport.open();
      client = new TCLIService.Client(new TBinaryProtocol(transport));
    } catch (TTransportException e) {
      // NOTE(review): "connecton" typo in the message below; left unchanged
      // here because it is a runtime string.
      throw new HiveClientException("Could not establish connecton to "
          + host + ":" + port + ": " + e.toString(), e);
    }
    LOG.info("Hive connection opened");
    openSession();
  }

  /**
   * Based on JDBC implementation of HiveConnection.createBinaryTransport.
   * Builds a plain (NOSASL), Kerberos, delegation-token, or user/password
   * SASL transport (optionally over SSL) depending on the auth parameters.
   * @return transport
   * @throws HiveClientException
   */
  protected TTransport getTransport() throws HiveClientException, TTransportException {
    TTransport transport;
    boolean assumeSubject =
        Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
            .get(Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE));
    try {
      if (!Utils.HiveAuthenticationParams.AUTH_SIMPLE.equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE))) {
        // If Kerberos
        Map<String, String> saslProps = new HashMap<String, String>();
        SaslQOP saslQOP = SaslQOP.AUTH;
        if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL)) {
          if (authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_QOP)) {
            try {
              saslQOP = SaslQOP.fromString(authParams.get(Utils.HiveAuthenticationParams.AUTH_QOP));
            } catch (IllegalArgumentException e) {
              throw new HiveClientException("Invalid " + Utils.HiveAuthenticationParams.AUTH_QOP +
                  " parameter. " + e.getMessage(), e);
            }
          }
          saslProps.put(Sasl.QOP, saslQOP.toString());
          saslProps.put(Sasl.SERVER_AUTH, "true");
          // NOTE(review): the hard-coded 10000 repeated below is presumably a
          // socket timeout in milliseconds — consider extracting a constant.
          transport = KerberosSaslHelper.getKerberosTransport(
              authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL), host,
              HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps,
              assumeSubject);
        } else {
          // If there's a delegation token available then use token based connection
          String tokenStr = getClientDelegationToken(authParams);
          if (tokenStr != null) {
            transport = KerberosSaslHelper.getTokenTransport(tokenStr,
                host, HiveAuthFactory.getSocketTransport(host, port, 10000), saslProps);
          } else {
            // we are using PLAIN Sasl connection with user/password
            String userName = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
            String passwd = getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, Utils.HiveAuthenticationParams.ANONYMOUS_USER);
            // Note: Thrift returns an SSL socket that is already bound to the specified host:port
            // Therefore an open called on this would be a no-op later
            // Hence, any TTransportException related to connecting with the peer are thrown here.
            // Bubbling them up the call hierarchy so that a retry can happen in openTransport,
            // if dynamic service discovery is configured.
            if (isSslConnection()) {
              // get SSL socket
              String sslTrustStore = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
              String sslTrustStorePassword = authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
              if (sslTrustStore == null || sslTrustStore.isEmpty()) {
                transport = HiveAuthFactory.getSSLSocket(host, port, 10000);
              } else {
                transport = HiveAuthFactory.getSSLSocket(host, port, 10000,
                    sslTrustStore, sslTrustStorePassword);
              }
            } else {
              // get non-SSL socket transport
              transport = HiveAuthFactory.getSocketTransport(host, port, 10000);
            }
            // Overlay the SASL transport on top of the base socket transport (SSL or non-SSL)
            transport = PlainSaslHelper.getPlainTransport(userName, passwd, transport);
          }
        }
      } else {
        //NOSASL: plain unauthenticated socket transport
        return HiveAuthFactory.getSocketTransport(host, port, 10000);
      }
    } catch (SaslException e) {
      throw new HiveClientException("Could not create secure connection to "
          + host + ": " + e.getMessage(), e);
    }
    return transport;
  }

  /** @return true if USE_SSL is set to "true" (case-insensitive) in authParams */
  private boolean isSslConnection() {
    return "true".equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.USE_SSL));
  }

  // Lookup the delegation token. First in the connection URL, then Configuration
  private String getClientDelegationToken(Map<String, String> jdbcConnConf) throws HiveClientException {
    String tokenStr = null;
    if (Utils.HiveAuthenticationParams.AUTH_TOKEN.equalsIgnoreCase(jdbcConnConf.get(Utils.HiveAuthenticationParams.AUTH_TYPE))) {
      // check delegation token in job conf if any
      try {
        tokenStr = ShimLoader.getHadoopShims().
            getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
      } catch (IOException e) {
        throw new HiveClientException("Error reading token ", e);
      }
    }
    return tokenStr;
  }

  /**
   * Returns the auth parameter for {@code key}, or {@code defaultValue}
   * when the key is absent.
   */
  private String getAuthParamDefault(String key, String defaultValue) {
    if (authParams.containsKey(key))
      return authParams.get(key);
    return defaultValue;
  }

  /**
   * Opens a Hive session over the already-open transport and records the
   * negotiated protocol version and session handle.
   * @throws HiveClientException if the session cannot be opened
   */
  private synchronized void openSession() throws HiveClientException {
    //It's possible to set proxy user configuration here
    TOpenSessionResp openResp = new HiveCall<TOpenSessionResp>(this) {
      @Override
      public TOpenSessionResp body() throws HiveClientException {
        TOpenSessionReq openReq = new TOpenSessionReq();
        try {
          return client.OpenSession(openReq);
        } catch (TException e) {
          throw new HiveClientException("Unable to open Hive session", e);
        }

      }
    }.call();
    Utils.verifySuccess(openResp.getStatus(), "Unable to open Hive session");

    protocol = openResp.getServerProtocolVersion();
    sessHandle = openResp.getSessionHandle();
    LOG.info("Hive session opened");
  }

  /**
   * Closes the current Hive session, if one is open, and clears the
   * session handle and protocol version.
   * @throws HiveClientException if the close RPC fails
   */
  private synchronized void closeSession() throws HiveClientException {
    if (sessHandle == null) return;
    TCloseSessionReq closeReq = new TCloseSessionReq(sessHandle);
    //It's possible to set proxy user configuration here
    TCloseSessionResp closeResp = null;
    try {
      closeResp = client.CloseSession(closeReq);
      Utils.verifySuccess(closeResp.getStatus(), "Unable to close Hive session");
    } catch (TException e) {
      throw new HiveClientException("Unable to close Hive session", e);
    }

    sessHandle = null;
    protocol = null;
    LOG.info("Hive session closed");
  }

  /**
   * Closes the session (best effort) and the underlying transport,
   * then resets all connection state. Safe to call when already closed.
   * @throws HiveClientException declared but session-close failures are
   *         only logged; the transport is closed regardless
   */
  public synchronized void closeConnection() throws HiveClientException {
    if (client == null) return;
    try {
      closeSession();
    } catch (HiveClientException e) {
      LOG.error("Unable to close Hive session: " + e.getMessage());
    } finally {
      transport.close();
      transport = null;
      client = null;
      sessHandle = null;
      protocol = null;
    }
    LOG.info("Connection to Hive closed");
  }

  /**
   * Execute query
   * @param cmd query; multiple statements may be separated by ';'
   * @param async submit asynchronously (true) or wait for completion (false)
   * @return handle of the last submitted statement
   * @throws HiveClientException on submission failure or when cmd is empty
   */
  public TOperationHandle execute(final String cmd, final boolean async) throws HiveClientException {
    TOperationHandle handle = null;
    // NOTE(review): naive split — a ';' inside a quoted literal would break a
    // statement apart; confirm callers never pass embedded semicolons.
    for(final String oneCmd : cmd.split(";")) {

      TExecuteStatementResp execResp = new HiveCall<TExecuteStatementResp>(this) {
        @Override
        public TExecuteStatementResp body() throws HiveClientException {

          TExecuteStatementReq execReq = null;
          execReq = new TExecuteStatementReq(getSessHandle(), oneCmd);
          execReq.setRunAsync(async);
          execReq.setConfOverlay(new HashMap<String, String>()); //maybe it's hive configuration? use it, Luke!
          try {
            return client.ExecuteStatement(execReq);
          } catch (TException e) {
            throw new HiveClientException("Unable to submit statement " + cmd, e);
          }

        }
      }.call();

      Utils.verifySuccess(execResp.getStatus(), "Unable to submit statement " + cmd);
      //TODO: check if status have results
      handle = execResp.getOperationHandle();
    }
    if (handle == null) {
      throw new HiveClientException("Empty command given", null);
    }
    return handle;
  }

  /** Submits cmd without waiting for completion. */
  public TOperationHandle executeAsync(String cmd) throws HiveClientException {
    return execute(cmd, true);
  }

  /** Submits cmd and waits for completion. */
  public TOperationHandle executeSync(String cmd) throws HiveClientException {
    return execute(cmd, false);
  }

  /**
   * Fetches the full operation log for the given operation.
   * @param handle operation handle
   * @return log lines joined with '\n'
   */
  public String getLogs(TOperationHandle handle) {
    LogsCursor results = new LogsCursor(this, handle);
    results.reset(); // rewind to the FIRST line so every call returns the
                     // log from the very beginning
    List<String> logLineList = results.getValuesInColumn(0);
    StringBuilder log = new StringBuilder();
    for(String line : logLineList) {
      log.append(line);
      log.append('\n');
    }
    return log.toString();
  }

  /**
   * Returns a cursor over the operation's result rows, rewound to the start.
   * @param handle operation handle
   * @return cursor positioned at the first row
   */
  public Cursor getResults(TOperationHandle handle) {
    Cursor cursor = new Cursor(this, handle);
    cursor.reset(); // rewind so every call reads results from the first row
    return cursor;
  }

  /**
   * Retrieve status of operation
   * @param operationHandle handle
   * @return thrift status response object
   * @throws HiveClientException
   */
  public TGetOperationStatusResp getOperationStatus(final TOperationHandle operationHandle) throws HiveClientException {
    return new HiveCall<TGetOperationStatusResp>(this) {
      @Override
      public TGetOperationStatusResp body() throws HiveClientException {

        TGetOperationStatusReq statusReq = new TGetOperationStatusReq(operationHandle);
        try {
          return client.GetOperationStatus(statusReq);
        } catch (TException e) {
          throw new HiveClientException("Unable to fetch operation status", e);
        }

      }
    }.call();
//    transportLock.lock();
//    try {
//      return client.GetOperationStatus(statusReq);
//    } catch (TException e) {
//      throw new HiveClientException("Unable to fetch operation status", e);
//    } finally {
//      transportLock.unlock();
//    }
  }

  /**
   * Cancel operation
   * @param operationHandle operation handle
   * @throws HiveClientException if the cancel RPC fails
   */
  public void cancelOperation(final TOperationHandle operationHandle) throws HiveClientException {
    TCancelOperationResp cancelResp = new HiveCall<TCancelOperationResp>(this) {
      @Override
      public TCancelOperationResp body() throws HiveClientException {
        TCancelOperationReq cancelReq = new TCancelOperationReq(operationHandle);
        try {
          return client.CancelOperation(cancelReq);
        } catch (TException e) {
          // NOTE(review): the caught TException is dropped here (null passed
          // as the cause) — consider passing e to preserve the stack trace.
          throw new HiveClientException("Unable to cancel operation", null);
        }
      }
    }.call();
    Utils.verifySuccess(cancelResp.getStatus(), "Unable to cancel operation");
  }

  public int getPort() {
    return port;
  }

  public void setPort(int port) {
    this.port = port;
  }

  public String getHost() {
    return host;
  }

  public void setHost(String host) {
    this.host = host;
  }

  /**
   * Returns the current session handle, transparently reopening
   * a session if none is open.
   */
  public TSessionHandle getSessHandle() throws HiveClientException {
    if (sessHandle == null)
      openSession();
    return sessHandle;
  }

  public void setSessHandle(TSessionHandle sessHandle) {
    this.sessHandle = sessHandle;
  }

  public TCLIService.Client getClient() {
    return client;
  }

  public void setClient(TCLIService.Client client) {
    this.client = client;
  }

  public TProtocolVersion getProtocol() {
    return protocol;
  }

  public void setProtocol(TProtocolVersion protocol) {
    this.protocol = protocol;
  }

  public Map<String, String> getAuthParams() {
    return authParams;
  }

  public void setAuthParams(Map<String, String> authParams) {
    this.authParams = authParams;
  }
}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
new file mode 100644
index 0000000..6d07067
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionPool.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.hive.utils.ServiceFormattedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Keeps a single Hive Connection per view instance, keyed by instance name.
+ *
+ * Access is synchronized because view requests may arrive concurrently and
+ * the backing map is a plain HashMap (previously an unsynchronized
+ * check-then-act race could create duplicate connections).
+ */
+public class ConnectionPool {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(ConnectionPool.class);
+
+  private static Map<String, Connection> viewSingletonObjects = new HashMap<String, Connection>();
+
+  private ConnectionPool() {
+    // static utility class — not instantiable
+  }
+
+  /**
+   * Returns the Connection object specific to a view instance, creating and
+   * caching it on first use.
+   * @param context View Context instance
+   * @return Hive connection delegate object
+   */
+  public static synchronized Connection getConnection(ViewContext context) {
+    String instanceName = context.getInstanceName();
+    if (!viewSingletonObjects.containsKey(instanceName)) {
+      viewSingletonObjects.put(instanceName, connectToHive(context));
+    }
+    return viewSingletonObjects.get(instanceName);
+  }
+
+  private static Connection connectToHive(ViewContext context) {
+    try {
+      return new Connection(getHiveHost(context), getHivePortNumber(context), getHiveAuthParams(context));
+    } catch (HiveClientException e) {
+      throw new ServiceFormattedException("Couldn't open connection to Hive: " + e.toString(), e);
+    }
+  }
+
+  /**
+   * Replaces the cached connection for a view instance (e.g. from tests).
+   * @param context View Context instance
+   * @param api connection to cache
+   */
+  public static synchronized void setInstance(ViewContext context, Connection api) {
+    viewSingletonObjects.put(context.getInstanceName(), api);
+  }
+
+  private static String getHiveHost(ViewContext context) {
+    return context.getProperties().get("hive.host");
+  }
+
+  private static String getHivePort(ViewContext context) {
+    return context.getProperties().get("hive.port");
+  }
+
+  /**
+   * Parses the hive.port property, reporting a readable error instead of
+   * letting a raw NumberFormatException escape when the value is missing
+   * or not an integer.
+   */
+  private static int getHivePortNumber(ViewContext context) {
+    String port = getHivePort(context);
+    try {
+      return Integer.valueOf(port);
+    } catch (NumberFormatException e) {
+      throw new ServiceFormattedException("Invalid hive.port property value: " + port, e);
+    }
+  }
+
+  /**
+   * Parses "k1=v1;k2=v2" pairs from the hive.auth property; defaults to
+   * auth=NOSASL when the property is absent or empty. Malformed pairs are
+   * logged and skipped (best-effort, preserved behavior).
+   */
+  private static Map<String, String> getHiveAuthParams(ViewContext context) {
+    String auth = context.getProperties().get("hive.auth");
+    Map<String, String> params = new HashMap<String, String>();
+    if (auth == null || auth.isEmpty()) {
+      auth = "auth=NOSASL";
+    }
+    for (String param : auth.split(";")) {
+      String[] keyvalue = param.split("=");
+      if (keyvalue.length != 2) {
+        LOG.error("Can not parse authentication param " + param + " in " + auth);
+        continue;
+      }
+      params.put(keyvalue[0], keyvalue[1]);
+    }
+    return params;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
new file mode 100644
index 0000000..84987f5
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Cursor.java
@@ -0,0 +1,233 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import static org.apache.hive.service.cli.thrift.TCLIServiceConstants.TYPE_NAMES;
+
+import org.apache.ambari.view.hive.utils.BadRequestFormattedException;
+import org.apache.hive.service.cli.RowSet;
+import org.apache.hive.service.cli.RowSetFactory;
+import org.apache.hive.service.cli.thrift.*;
+import org.apache.thrift.TException;
+import sun.reflect.generics.reflectiveObjects.NotImplementedException;
+
+import java.util.*;
+
+/**
+ * Iterable cursor over the result rows of a Hive operation. Rows are pulled
+ * from the server lazily in blocks of FETCH_SIZE through the Thrift client
+ * of the owning Connection.
+ */
+public class Cursor implements Iterator<Row>, Iterable<Row> {
+  private final int FETCH_SIZE = 50;
+
+  private TCLIService.Client client;
+  private TOperationHandle opHandle;
+
+  private RowSet fetched = null;
+  private Iterator<Object[]> fetchedIterator = null;
+  private Connection connection;
+  private boolean resetCursor = false;
+  private ArrayList<ColumnDescription> schema;
+  private long offset;  // count of rows already returned by next()
+  private HashSet<Integer> selectedColumns = new LinkedHashSet<Integer>();
+
+  public Cursor(Connection connection, TOperationHandle opHandle) {
+    this.connection = connection;
+    this.client = connection.getClient();
+    this.opHandle = opHandle;
+  }
+
+  public TOperationHandle getOpHandle() {
+    return opHandle;
+  }
+
+  public void setOpHandle(TOperationHandle opHandle) {
+    this.opHandle = opHandle;
+  }
+
+  /**
+   * Fetches the next block of rows into fetchedIterator. If reset() was
+   * called, restarts fetching from the first row.
+   */
+  private void fetchNextBlock() throws HiveClientException {
+    //fetch another bunch
+    TFetchResultsResp fetchResp = new HiveCall<TFetchResultsResp>(connection) {
+      @Override
+      public TFetchResultsResp body() throws HiveClientException {
+        TFetchOrientation orientation = TFetchOrientation.FETCH_NEXT;
+        if (resetCursor) {
+          orientation = TFetchOrientation.FETCH_FIRST;
+          resetCursor = false;
+          offset = 0;
+        }
+
+        TFetchResultsReq fetchReq = getFetchResultsReq(orientation);
+        try {
+          return client.FetchResults(fetchReq);
+        } catch (TException e) {
+          throw new HiveClientException("Unable to fetch results", e);
+        }
+
+      }
+    }.call();
+    Utils.verifySuccess(fetchResp.getStatus(), "Unable to fetch results");
+    TRowSet results = fetchResp.getResults();
+    fetched = RowSetFactory.create(results, connection.getProtocol());
+    fetchedIterator = fetched.iterator();
+  }
+
+  /**
+   * Builds the fetch request; subclasses may override to change the fetch
+   * type (e.g. LogsCursor).
+   */
+  protected TFetchResultsReq getFetchResultsReq(TFetchOrientation orientation) {
+    return new TFetchResultsReq(opHandle, orientation, FETCH_SIZE);
+  }
+
+  /**
+   * Returns the column descriptions of the result set, restricted to the
+   * selected columns when selectColumns() was used. Cached after the first
+   * retrieval.
+   */
+  public ArrayList<ColumnDescription> getSchema() throws HiveClientException {
+    if (this.schema == null) {
+      // TODO: extract all HiveCall inline classes to separate files
+      TGetResultSetMetadataResp fetchResp = new HiveCall<TGetResultSetMetadataResp>(connection) {
+        @Override
+        public TGetResultSetMetadataResp body() throws HiveClientException {
+
+          TGetResultSetMetadataReq fetchReq = new TGetResultSetMetadataReq(opHandle);
+          try {
+            return client.GetResultSetMetadata(fetchReq);
+          } catch (TException e) {
+            throw new HiveClientException("Unable to fetch results metadata", e);
+          }
+
+        }
+      }.call();
+      Utils.verifySuccess(fetchResp.getStatus(), "Unable to fetch results metadata");
+      // local renamed from "schema" to avoid shadowing the field of the same name
+      TTableSchema resultSchema = fetchResp.getSchema();
+
+      List<TColumnDesc> thriftColumns = resultSchema.getColumns();
+      ArrayList<ColumnDescription> columnDescriptions = new ArrayList<ColumnDescription>(thriftColumns.size());
+
+      for (TColumnDesc columnDesc : thriftColumns) {
+        String name = columnDesc.getColumnName();
+        String type = TYPE_NAMES.get(columnDesc.getTypeDesc().getTypes().get(0).getPrimitiveEntry().getType());
+        int position = columnDesc.getPosition();
+        columnDescriptions.add(ColumnDescriptionShort.createShortColumnDescription(name, type, position));
+      }
+      if (selectedColumns.size() == 0) {
+        this.schema = columnDescriptions;
+      } else {
+        ArrayList<ColumnDescription> selectedColumnsSchema = new ArrayList<ColumnDescription>();
+        for (Integer selectedIndex : selectedColumns) {
+          selectedColumnsSchema.add(columnDescriptions.get(selectedIndex));
+        }
+        this.schema = selectedColumnsSchema;
+      }
+    }
+    return this.schema;
+  }
+
+  /**
+   * Get list with all values in one column
+   * @param column column index
+   * @return list of objects in column
+   */
+  @SuppressWarnings("unchecked")  // row cells are Object; the caller chooses T
+  public <T> List<T> getValuesInColumn(int column) {
+    LinkedList<T> list = new LinkedList<T>();
+    for (Row row : this) {
+      list.add((T) row.getRow()[column]);
+    }
+    return list;
+  }
+
+  /**
+   * Get logs Result object
+   * @return Result object configured to fetch logs
+   */
+  public Cursor getLogs() {
+    return new LogsCursor(connection, opHandle);
+  }
+
+  /** Discards any fetched rows so iteration restarts from the first row. */
+  public void reset() {
+    fetchedIterator = null;
+    fetched = null;
+    resetCursor = true;
+    offset = 0;
+  }
+
+  @Override
+  public boolean hasNext() {
+    fetchIfNeeded();
+    return fetchedIterator.hasNext();
+  }
+
+  private void fetchIfNeeded() {
+    if (fetchedIterator == null || !fetchedIterator.hasNext()) {
+      try {
+        fetchNextBlock();
+      } catch (HiveClientException e) {
+        // Iterator methods cannot declare checked exceptions, so wrap
+        throw new HiveClientRuntimeException(e.getMessage(), e);
+      }
+    }
+  }
+
+  @Override
+  public Row next() {
+    if (!hasNext())
+      throw new NoSuchElementException();
+    Row row = new Row(fetchedIterator.next(), selectedColumns);
+    offset++;
+    return row;
+  }
+
+  @Override
+  public void remove() {
+    // Was sun.reflect...NotImplementedException — an internal JDK class;
+    // the Iterator contract specifies UnsupportedOperationException here.
+    throw new UnsupportedOperationException("Cursor does not support remove()");
+  }
+
+  @Override
+  public Iterator<Row> iterator() {
+    return this;
+  }
+
+  /** @return number of rows already consumed via next() */
+  public long getOffset() {
+    return offset;
+  }
+
+  /**
+   * Reads up to count rows into the given list.
+   * @return number of rows actually read
+   */
+  public int read(ArrayList<Row> rows, int count) {
+    int read = 0;
+    while (read < count && hasNext()) {
+      rows.add(next());
+      read++;
+    }
+    return read;
+  }
+
+  /**
+   * Reads up to count rows as raw Object[] cells into the given list.
+   * @return number of rows actually read
+   */
+  public int readRaw(ArrayList<Object[]> rows, int count) {
+    int read = 0;
+    while (read < count && hasNext()) {
+      rows.add(next().getRow());
+      read++;
+    }
+    return read;
+  }
+
+  /**
+   * Restricts returned rows and schema to a subset of columns.
+   * @param columnsRequested comma-separated 0-based column indexes, or null for all
+   * @throws BadRequestFormattedException if a token is not an integer
+   */
+  public void selectColumns(String columnsRequested) {
+    selectedColumns.clear();
+    if (columnsRequested != null) {
+      for (String columnRequested : columnsRequested.split(",")) {
+        try {
+          selectedColumns.add(Integer.parseInt(columnRequested));
+        } catch (NumberFormatException ex) {
+          throw new BadRequestFormattedException("Columns param should be comma-separated integers", ex);
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
new file mode 100644
index 0000000..9cb5a1b
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/DDLDelegator.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.hive.service.cli.thrift.*;
+import org.apache.thrift.TException;
+
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * DDL helper built on top of a Connection: lists databases and tables and
+ * describes table columns.
+ *
+ * NOTE(review): db/table/like values are spliced into HiveQL text via
+ * String.format — callers must pass trusted identifiers only.
+ */
+public class DDLDelegator {
+  private Connection connection;
+
+  public DDLDelegator(Connection connection) {
+    this.connection = connection;
+  }
+
+  /**
+   * Retrieve list of tables in DB
+   * @param db db name
+   * @param like table name pattern for SHOW TABLES LIKE
+   * @return list of table names
+   * @throws HiveClientException
+   */
+  public List<String> getTableList(String db, String like) throws HiveClientException {
+    Cursor cursor = getTableListCursor(db, like);
+    return cursor.getValuesInColumn(0);
+  }
+
+  /**
+   * Retrieve list of tables in DB results set
+   * @param db db name
+   * @param like table name pattern for SHOW TABLES LIKE
+   * @return cursor over table names
+   * @throws HiveClientException
+   */
+  public Cursor getTableListCursor(String db, String like) throws HiveClientException {
+    connection.executeSync(String.format("use %s", db));
+    TOperationHandle handle = connection.executeSync(String.format("show tables like '%s'", like));
+
+    return new Cursor(connection, handle);
+  }
+
+  /**
+   * Retrieve databases
+   * @param like '*' for all
+   * @return list of databases
+   * @throws HiveClientException
+   */
+  public List<String> getDBList(String like) throws HiveClientException {
+    Cursor cursor = getDBListCursor(like);
+    return cursor.getValuesInColumn(0);
+  }
+
+  /**
+   * Retrieve databases results set
+   * @param like '*' for all
+   * @return cursor over database names
+   * @throws HiveClientException
+   */
+  public Cursor getDBListCursor(String like) throws HiveClientException {
+    TOperationHandle handle = connection.executeSync(String.format("show databases like '%s'", like));
+    return new Cursor(connection, handle);
+  }
+
+  /**
+   * Retrieve table schema
+   * @param db database name
+   * @param table table name
+   * @param like column name substring filter; null matches all columns
+   * @param extended whether to build extended column descriptions
+   * @return schema
+   * @throws HiveClientException
+   */
+  public List<ColumnDescription> getTableDescription(final String db, final String table, String like, boolean extended) throws HiveClientException {
+    // Cell indexes below are 0-based positions in a Thrift GetColumns row;
+    // the layout appears to follow JDBC DatabaseMetaData.getColumns:
+    // 3=COLUMN_NAME, 5=TYPE_NAME, 11=REMARKS, 16=ORDINAL_POSITION
+    // — TODO confirm against the target HiveServer2 version.
+    List<ColumnDescription> columnDescriptions = new LinkedList<ColumnDescription>();
+    Cursor cursor = getTableDescriptionCursor(db, table, like);
+    for (Row row : cursor) {
+      Object[] rowObjects = row.getRow();
+
+      ColumnDescription columnDescription;
+      if (extended) {
+        //TODO: retrieve sortedBy, clusteredBy, partitioned
+        columnDescription = ColumnDescriptionExtended.createExtendedColumnDescription(
+            (String) rowObjects[3], (String) rowObjects[5], (String) rowObjects[11],
+            false, false, false, (Integer) rowObjects[16]);
+      } else {
+        columnDescription = ColumnDescriptionShort.createShortColumnDescription(
+            (String) rowObjects[3], (String) rowObjects[5], (Integer) rowObjects[16]);
+      }
+      columnDescriptions.add(columnDescription);
+    }
+    return columnDescriptions;
+  }
+
+  /**
+   * Retrieve table schema results set
+   * @param db database name
+   * @param table table name
+   * @param like column name substring filter; null matches all columns
+   * @return cursor over schema rows
+   * @throws HiveClientException
+   */
+  public Cursor getTableDescriptionCursor(final String db, final String table, String like) throws HiveClientException {
+    if (like == null)
+      like = ".*";
+    else
+      like = ".*" + like + ".*";
+    final String finalLike = like;
+    TGetColumnsResp resp = new HiveCall<TGetColumnsResp>(connection) {
+      @Override
+      public TGetColumnsResp body() throws HiveClientException {
+
+        TGetColumnsReq req = new TGetColumnsReq(conn.getSessHandle());
+        req.setSchemaName(db);
+        req.setTableName(table);
+        req.setColumnName(finalLike);
+        try {
+          // use conn (the HiveCall field, same object as connection)
+          // consistently with the getSessHandle() call above
+          return conn.getClient().GetColumns(req);
+        } catch (TException e) {
+          throw new HiveClientException("Unable to get table columns", e);
+        }
+      }
+
+    }.call();
+
+    return new Cursor(connection, resp.getOperationHandle());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
new file mode 100644
index 0000000..c8b43e2
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveCall.java
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.thrift.transport.TTransportException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Template for a single Thrift call against a Connection. Executes body()
+ * under the connection's lock and transparently reconnects/retries (at most
+ * twice) when the failure's root cause is a Thrift transport error.
+ */
+public abstract class HiveCall <T> {
+  private final static Logger LOG =
+      LoggerFactory.getLogger(HiveCall.class);
+
+  protected final Connection conn;
+
+  public HiveCall(Connection connection) {
+    this.conn = connection;
+  }
+
+  /** The actual Thrift request; implemented by each call site. */
+  public abstract T body() throws HiveClientException;
+
+  /**
+   * Runs body(), reopening the connection and retrying up to two times on
+   * transport-level failures; any other failure propagates immediately.
+   */
+  public T call() throws HiveClientException {
+    for (int attempt = 0; ; attempt++) {
+      if (attempt > 0) {
+        // reconnect before every retry
+        conn.closeConnection();
+        conn.openConnection();
+      }
+      try {
+        synchronized (conn) {
+          return body();
+        }
+      } catch (HiveClientException ex) {
+        Throwable root = ExceptionUtils.getRootCause(ex);
+        if (attempt < 2 && root instanceof TTransportException) {
+          LOG.error("Retry call because of Transport Exception: " + root.toString());
+          continue;
+        }
+        throw ex;
+      }
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
new file mode 100644
index 0000000..9dd04de
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+/**
+ * Checked exception for Hive client failures; the message is the given
+ * comment, with the wrapped exception's toString() appended when present.
+ */
+public class HiveClientException extends Exception {
+
+  public HiveClientException(String comment, Exception ex) {
+    super(buildMessage(comment, ex), ex);
+  }
+
+  private static String buildMessage(String comment, Exception ex) {
+    if (ex == null) {
+      return comment;
+    }
+    return comment + ": " + ex.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
new file mode 100644
index 0000000..1393012
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveClientRuntimeException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+/**
+ * Unchecked counterpart of HiveClientException, used where checked
+ * exceptions cannot be declared (e.g. Iterator methods); the message is the
+ * given comment, with the wrapped exception's toString() appended when present.
+ */
+public class HiveClientRuntimeException extends RuntimeException {
+
+  public HiveClientRuntimeException(String comment, Exception ex) {
+    super(buildMessage(comment, ex), ex);
+  }
+
+  private static String buildMessage(String comment, Exception ex) {
+    if (ex == null) {
+      return comment;
+    }
+    return comment + ": " + ex.toString();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
new file mode 100644
index 0000000..7adbe23
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HiveErrorStatusException.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.hive.service.cli.thrift.TStatusCode;
+
+/**
+ * Thrown when a Thrift operation completes with an error status code.
+ */
+public class HiveErrorStatusException extends HiveClientException {
+
+  public HiveErrorStatusException(TStatusCode statusCode, String comment) {
+    super(formatMessage(statusCode, comment), null);
+  }
+
+  private static String formatMessage(TStatusCode statusCode, String comment) {
+    return String.format("Failed with status %s: %s", statusCode, comment);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5482d89/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
new file mode 100644
index 0000000..a6705e4
--- /dev/null
+++ b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/LogsCursor.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.hive.service.cli.thrift.TFetchOrientation;
+import org.apache.hive.service.cli.thrift.TFetchResultsReq;
+import org.apache.hive.service.cli.thrift.TOperationHandle;
+
+/**
+ * Cursor configured to fetch an operation's log entries instead of its
+ * result rows (see Cursor.getLogs()).
+ */
+public class LogsCursor extends Cursor {
+
+  public LogsCursor(Connection connection, TOperationHandle opHandle) {
+    super(connection, opHandle);
+  }
+
+  @Override
+  protected TFetchResultsReq getFetchResultsReq(TFetchOrientation orientation) {
+    // fetchType 1 selects the log stream rather than query results —
+    // see the TCLIService Thrift definition
+    TFetchResultsReq logsRequest = super.getFetchResultsReq(orientation);
+    logsRequest.setFetchType((short) 1);
+    return logsRequest;
+  }
+}


Mime
View raw message