ambari-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From dbhowm...@apache.org
Subject [10/10] ambari git commit: AMBARI-17079. Moved Hue-to-Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)
Date Wed, 29 Jun 2016 11:46:20 GMT
AMBARI-17079. Moved Hue-to-Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/424afb47
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/424afb47
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/424afb47

Branch: refs/heads/branch-2.4
Commit: 424afb471ac76da5ca4d4cafb93b103b543b910e
Parents: fd9574f
Author: Dipayan Bhowmick <dipayan.bhowmick@gmail.com>
Authored: Wed Jun 29 17:15:04 2016 +0530
Committer: Dipayan Bhowmick <dipayan.bhowmick@gmail.com>
Committed: Wed Jun 29 17:15:56 2016 +0530

----------------------------------------------------------------------
 contrib/views/hueambarimigration/pom.xml        | 142 ++--
 .../configurationcheck/ConfigurationCheck.java  | 182 -----
 .../configurationcheck/ProgressBarStatus.java   |  54 --
 .../controller/hive/HiveHistoryMigration.java   | 222 ------
 .../hive/HiveSavedQueryMigration.java           | 231 ------
 .../controller/pig/PigJobMigration.java         | 201 -----
 .../controller/pig/PigScriptMigration.java      | 208 -----
 .../controller/revertchange/RevertChange.java   | 217 ------
 .../datasource/DataSourceAmbariDatabase.java    |   2 -
 .../datasource/DataSourceHueDatabase.java       |   2 -
 .../historyqueryset/MysqlQuerySetAmbariDB.java  |  46 ++
 .../historyqueryset/OracleQuerySetAmbariDB.java |  44 ++
 .../PostgressQuerySetAmbariDB.java              |  22 +
 .../hive/historyqueryset/QuerySetAmbariDB.java  |  79 ++
 .../instancedetail/MysqlQuerySetAmbariDB.java   |  23 +
 .../instancedetail/OracleQuerySetAmbariDB.java  |  31 +
 .../PostgressQuerySetAmbariDB.java              |  22 +
 .../hive/instancedetail/QuerySetAmbariDB.java   |  48 ++
 .../savedqueryset/MysqlQuerySetAmbariDB.java    |  65 ++
 .../savedqueryset/OracleQuerySetAmbariDB.java   |  58 ++
 .../PostgressQuerySetAmbariDB.java              |  22 +
 .../hive/savedqueryset/QuerySetAmbariDB.java    | 131 ++++
 .../instancedetail/MysqlQuerySetAmbariDB.java   |  23 +
 .../instancedetail/OracleQuerySetAmbariDB.java  |  30 +
 .../PostgressQuerySetAmbariDB.java              |  22 +
 .../pig/instancedetail/QuerySetAmbariDB.java    |  39 +
 .../pig/jobqueryset/MysqlQuerySetAmbariDB.java  |  43 +
 .../pig/jobqueryset/OracleQuerySetAmbariDB.java |  41 +
 .../jobqueryset/PostgressQuerySetAmbariDB.java  |  22 +
 .../pig/jobqueryset/QuerySetAmbariDB.java       |  80 ++
 .../MysqlQuerySetAmbariDB.java                  |  43 +
 .../OracleQuerySetAmbariDB.java                 |  41 +
 .../PostgressQuerySetAmbariDB.java              |  22 +
 .../savedscriptqueryset/QuerySetAmbariDB.java   |  70 ++
 .../hive/historyqueryset/MysqlQuerySet.java     |  23 +
 .../hive/historyqueryset/OracleQuerySet.java    |  61 ++
 .../hive/historyqueryset/PostgressQuerySet.java |  22 +
 .../hive/historyqueryset/QuerySet.java          | 130 ++++
 .../hive/historyqueryset/SqliteQuerySet.java    |  22 +
 .../hive/savedqueryset/MysqlQuerySet.java       |  23 +
 .../hive/savedqueryset/OracleQuerySet.java      |  65 ++
 .../hive/savedqueryset/PostgressQuerySet.java   |  22 +
 .../hive/savedqueryset/QuerySet.java            | 134 ++++
 .../hive/savedqueryset/SqliteQuerySet.java      |  22 +
 .../pig/jobqueryset/MysqlQuerySet.java          |  22 +
 .../pig/jobqueryset/OracleQuerySet.java         |  65 ++
 .../pig/jobqueryset/PostgressQuerySet.java      |  22 +
 .../huequeryset/pig/jobqueryset/QuerySet.java   | 132 ++++
 .../pig/jobqueryset/SqliteQuerySet.java         |  22 +
 .../pig/savedscriptqueryset/MysqlQuerySet.java  |  22 +
 .../pig/savedscriptqueryset/OracleQuerySet.java |  60 ++
 .../savedscriptqueryset/PostgressQuerySet.java  |  67 ++
 .../pig/savedscriptqueryset/QuerySet.java       | 135 ++++
 .../pig/savedscriptqueryset/SqliteQuerySet.java |  24 +
 .../huequeryset/userdetails/MysqlQuerySet.java  |  24 +
 .../huequeryset/userdetails/OracleQuerySet.java |  28 +
 .../userdetails/PostgressQuerySet.java          |  22 +
 .../huequeryset/userdetails/QuerySet.java       |  42 +
 .../huequeryset/userdetails/SqliteQuerySet.java |  22 +
 .../migration/CreateJobId.java                  |  85 ++
 .../migration/CreateJobIdRevertChange.java      |  84 ++
 .../migration/InitiateJobMigration.java         | 102 +++
 .../InitiateJobMigrationforRevertchange.java    |  85 ++
 .../configuration/AllInstanceDetailsAmbari.java |  59 ++
 .../configuration/AmbariDatabaseCheck.java      |  62 ++
 .../configuration/AmbariWebHdfsCheck.java       |  60 ++
 .../configuration/CheckProgresStatus.java       |  70 ++
 .../ConfigurationCheckImplementation.java       | 134 ++++
 .../HiveInstanceDetailsAmbari.java              |  53 ++
 .../HiveInstanceDetailsUtility.java             | 106 +++
 .../configuration/HueDatabaseCheck.java         |  60 ++
 .../configuration/HueHttpUrlCheck.java          |  60 ++
 .../configuration/HueWebHdfsCheck.java          |  60 ++
 .../configuration/PigInstanceDetailsAmbari.java |  61 ++
 .../PigInstanceDetailsUtility.java              |  79 ++
 .../migration/configuration/UserDetailHue.java  |  58 ++
 .../configuration/UserDetailsUtility.java       |  80 ++
 .../HiveHistoryMigrationUtility.java            | 255 ++++++
 ...HiveHistoryQueryMigrationImplementation.java | 551 +++++++++++++
 .../hive/historyquery/HiveHistoryStartJob.java  |  74 ++
 .../HiveSavedQueryMigrationImplementation.java  | 673 ++++++++++++++++
 .../HiveSavedQueryMigrationUtility.java         | 281 +++++++
 .../hive/savedquery/HiveSavedQueryStartJob.java |  77 ++
 .../pigjob/PigJobMigrationImplementation.java   | 532 +++++++++++++
 .../pig/pigjob/PigJobMigrationUtility.java      | 238 ++++++
 .../migration/pig/pigjob/PigJobStartJob.java    |  68 ++
 .../pig/pigscript/PigSavedScriptStartJob.java   |  70 ++
 .../PigScriptMigrationImplementation.java       | 504 ++++++++++++
 .../pigscript/PigScriptMigrationUtility.java    | 229 ++++++
 .../revertchange/RevertChangeStartJob.java      |  70 ++
 .../revertchange/RevertChangeUtility.java       | 225 ++++++
 .../huetoambarimigration/model/PojoHive.java    |  51 --
 .../huetoambarimigration/model/PojoPig.java     |  72 --
 .../persistence/DataStoreStorage.java           | 151 ++++
 .../persistence/InstanceKeyValueStorage.java    | 132 ++++
 .../persistence/KeyValueStorage.java            | 162 ++++
 .../persistence/LocalKeyValueStorage.java       |  70 ++
 .../persistence/PersistentConfiguration.java    |  52 ++
 .../persistence/SmokeTestEntity.java            |  45 ++
 .../persistence/Storage.java                    |  78 ++
 .../utils/ContextConfigurationAdapter.java      | 260 +++++++
 .../persistence/utils/FilteringStrategy.java    |  31 +
 .../persistence/utils/Indexed.java              |  36 +
 .../persistence/utils/ItemNotFound.java         |  25 +
 .../utils/OnlyOwnersFilteringStrategy.java      |  33 +
 .../persistence/utils/Owned.java                |  36 +
 .../persistence/utils/PersonalResource.java     |  22 +
 .../persistence/utils/StorageUtil.java          |  94 +++
 .../resources/CRUDResourceManager.java          | 123 +++
 .../resources/PersonalCRUDResourceManager.java  | 105 +++
 .../resources/SharedCRUDResourceManager.java    |  50 ++
 .../scripts/MigrationResourceManager.java       |  52 ++
 .../scripts/MigrationResourceProvider.java      | 108 +++
 .../scripts/models/ConfigurationModel.java      |  52 ++
 .../resources/scripts/models/HiveModel.java     |  51 ++
 .../resources/scripts/models/InstanceModel.java |  41 +
 .../scripts/models/JobReturnIdModel.java        |  43 +
 .../scripts/models/MigrationModel.java          | 130 ++++
 .../scripts/models/MigrationResponse.java       | 143 ++++
 .../resources/scripts/models/PigModel.java      |  72 ++
 .../scripts/models/ProgressCheckModel.java      |  60 ++
 .../resources/scripts/models/UserModel.java     |  48 ++
 .../configurationcheck/ConfFileReader.java      | 199 -----
 .../service/hive/HiveHistoryQueryImpl.java      | 562 --------------
 .../service/hive/HiveSavedQueryImpl.java        | 778 -------------------
 .../service/pig/PigJobImpl.java                 | 563 --------------
 .../service/pig/PigScriptImpl.java              | 600 --------------
 .../utils/BadRequestFormattedException.java     |  27 +
 .../utils/FilePaginator.java                    |  72 ++
 .../MisconfigurationFormattedException.java     |  47 ++
 .../utils/NotFoundFormattedException.java       |  27 +
 .../utils/ServiceFormattedException.java        | 101 +++
 .../src/main/resources/WEB-INF/web.xml          | 123 ---
 .../src/main/resources/index.jsp                | 119 ---
 .../src/main/resources/ui/.gitignore            |  33 -
 .../src/main/resources/ui/bower.json            |  15 -
 .../main/resources/ui/checkconfiguration.jsp    |  57 --
 .../resources/ui/hivehistoryquerymigration.jsp  | 229 ------
 .../resources/ui/hivesavedquerymigration.jsp    | 240 ------
 .../src/main/resources/ui/homepage.jsp          |  31 -
 .../ui/hueambarimigration-view/.bowerrc         |   4 +
 .../ui/hueambarimigration-view/.editorconfig    |  34 +
 .../ui/hueambarimigration-view/.ember-cli       |  10 +
 .../ui/hueambarimigration-view/.gitignore       |  44 ++
 .../ui/hueambarimigration-view/.jshintrc        |  32 +
 .../ui/hueambarimigration-view/.travis.yml      |  22 +
 .../ui/hueambarimigration-view/.watchmanconfig  |   3 +
 .../ui/hueambarimigration-view/README.md        |  67 ++
 .../app/adapters/application.js                 |  47 ++
 .../ui/hueambarimigration-view/app/app.js       |  38 +
 .../app/components/.gitkeep                     |   0
 .../app/controllers/.gitkeep                    |   0
 .../app/helpers/.gitkeep                        |   0
 .../ui/hueambarimigration-view/app/index.html   |  40 +
 .../hueambarimigration-view/app/models/.gitkeep |   0
 .../app/models/allinstancedetail.js             |  23 +
 .../app/models/ambaridatabase.js                |  24 +
 .../app/models/ambariwebhdfsurl.js              |  24 +
 .../app/models/checkprogress.js                 |  30 +
 .../app/models/getmigrationresultid.js          |  23 +
 .../app/models/hiveinstancedetail.js            |  23 +
 .../app/models/huedatabase.js                   |  24 +
 .../app/models/huehttpurl.js                    |  24 +
 .../app/models/huewebhdfsurl.js                 |  24 +
 .../app/models/piginstancedetail.js             |  23 +
 .../app/models/returnjobid.js                   |  23 +
 .../app/models/returnjobidforrevertchange.js    |  23 +
 .../app/models/startmigration.js                |  23 +
 .../app/models/startrevertchange.js             |  23 +
 .../app/models/usersdetail.js                   |  23 +
 .../ui/hueambarimigration-view/app/resolver.js  |  20 +
 .../ui/hueambarimigration-view/app/router.js    |  38 +
 .../hueambarimigration-view/app/routes/.gitkeep |   0
 .../app/routes/check-configuration.js           | 215 +++++
 .../app/routes/home-page.js                     |  21 +
 .../app/routes/home-page/hive-history.js        | 106 +++
 .../app/routes/home-page/hive-saved-query.js    | 102 +++
 .../app/routes/home-page/pig-job.js             | 102 +++
 .../app/routes/home-page/pig-script.js          | 103 +++
 .../app/routes/home-page/revert-change.js       |  97 +++
 .../hueambarimigration-view/app/routes/index.js |  24 +
 .../app/serializers/application.js              |  19 +
 .../hueambarimigration-view/app/styles/app.css  |  25 +
 .../hueambarimigration-view/app/styles/app.scss |  18 +
 .../app/templates/application.hbs               |  24 +
 .../app/templates/check-configuration.hbs       | 153 ++++
 .../app/templates/home-page.hbs                 |  42 +
 .../app/templates/home-page/hive-history.hbs    | 125 +++
 .../templates/home-page/hive-saved-query.hbs    | 126 +++
 .../app/templates/home-page/pig-job.hbs         | 127 +++
 .../app/templates/home-page/pig-script.hbs      | 127 +++
 .../app/templates/home-page/revert-change.hbs   |  99 +++
 .../ui/hueambarimigration-view/bower.json       |  14 +
 .../config/environment.js                       |  63 ++
 .../hueambarimigration-view/ember-cli-build.js  |  61 ++
 .../ui/hueambarimigration-view/package.json     |  51 ++
 .../public/crossdomain.xml                      |  15 +
 .../hueambarimigration-view/public/robots.txt   |   3 +
 .../ui/hueambarimigration-view/testem.js        |  29 +
 .../ui/hueambarimigration-view/tests/.jshintrc  |  53 ++
 .../tests/helpers/destroy-app.js                |  22 +
 .../tests/helpers/module-for-acceptance.js      |  40 +
 .../tests/helpers/resolver.js                   |  28 +
 .../tests/helpers/start-app.js                  |  35 +
 .../ui/hueambarimigration-view/tests/index.html |  48 ++
 .../tests/test-helper.js                        |  25 +
 .../ui/hueambarimigration-view/vendor/.gitkeep  |   0
 .../src/main/resources/ui/package.json          |  27 -
 .../src/main/resources/ui/pigjobmigration.jsp   | 233 ------
 .../main/resources/ui/pigscriptsmigration.jsp   | 227 ------
 .../src/main/resources/ui/revertchange.jsp      | 203 -----
 .../src/main/resources/view.xml                 |  98 +++
 contrib/views/pom.xml                           |   3 +
 213 files changed, 13164 insertions(+), 5742 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/pom.xml b/contrib/views/hueambarimigration/pom.xml
index f121735..ea34687 100644
--- a/contrib/views/hueambarimigration/pom.xml
+++ b/contrib/views/hueambarimigration/pom.xml
@@ -16,24 +16,32 @@
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari.contrib.views</groupId>
+  <artifactId>hueambarimigration</artifactId>
+  <version>2.4.0.0.0</version>
+  <name>hueambarimigration</name>
+
   <parent>
     <groupId>org.apache.ambari.contrib.views</groupId>
     <artifactId>ambari-contrib-views</artifactId>
     <version>2.4.0.0.0</version>
   </parent>
-  <modelVersion>4.0.0</modelVersion>
-  <artifactId>huetoambari-view</artifactId>
-  <packaging>jar</packaging>
-  <name>Hue To Ambari Migration-view</name>
-  <version>2.4.0.0.0</version>
-  <url>http://maven.apache.org</url>
+
   <properties>
     <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
   </properties>
-
-
   <dependencies>
-
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.6.2</version>
+    </dependency>
+    <dependency>
+      <groupId>com.jayway.jsonpath</groupId>
+      <artifactId>json-path</artifactId>
+      <version>2.0.0</version>
+    </dependency>
     <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
@@ -43,14 +51,12 @@
     <dependency>
       <groupId>org.apache.ambari</groupId>
       <artifactId>ambari-views</artifactId>
-      <version>2.4.0.0.0</version>
+      <version>[1.7.0.0,)</version>
     </dependency>
-
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
       <version>2.5</version>
-
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -110,9 +116,41 @@
       <groupId>c3p0</groupId>
       <artifactId>c3p0</artifactId>
       <version>0.9.1.2</version>
-
     </dependency>
-
+    <dependency>
+      <groupId>com.jayway.jsonpath</groupId>
+      <artifactId>json-path</artifactId>
+      <version>2.0.0</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-multipart</artifactId>
+      <version>1.18</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-client</artifactId>
+      <version>1.8</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-core</artifactId>
+      <version>1.18.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey</groupId>
+      <artifactId>jersey-json</artifactId>
+      <version>1.9</version>
+    </dependency>
+    <dependency>
+      <groupId>com.googlecode.json-simple</groupId>
+      <artifactId>json-simple</artifactId>
+      <version>1.1.1</version>
+    </dependency>
   </dependencies>
 
   <build>
@@ -134,7 +172,7 @@
         <configuration>
           <nodeVersion>v0.12.2</nodeVersion>
           <npmVersion>1.4.8</npmVersion>
-          <workingDirectory>${project.basedir}/src/main/resources/ui</workingDirectory>
+          <workingDirectory>${project.basedir}/src/main/resources/ui/hueambarimigration-view/</workingDirectory>
         </configuration>
         <executions>
           <execution>
@@ -151,7 +189,30 @@
               <goal>npm</goal>
             </goals>
             <configuration>
-              <arguments>install --python="${project.basedir}/src/main/unix/ambari-python-wrap" --unsafe-perm
+              <arguments>install --python="${project.basedir}/../src/main/unix/ambari-python-wrap" --unsafe-perm
+              </arguments>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <plugin>
+        <artifactId>exec-maven-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <version>1.3.2</version>
+        <executions>
+          <execution>
+            <id>Hueambarimigration-build</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+            <configuration>
+              <workingDirectory>${basedir}/src/main/resources/ui/hueambarimigration-view</workingDirectory>
+              <executable>node/node</executable>
+              <arguments>
+                <argument>node_modules/.bin/ember</argument>
+                <argument>build</argument>
               </arguments>
             </configuration>
           </execution>
@@ -181,61 +242,20 @@
 
       <resource>
         <directory>src/main/resources</directory>
-        <filtering>true</filtering>
+        <filtering>false</filtering>
         <includes>
-          <include>index.jsp</include>
-          <include>image/*.*</include>
+          <include>META-INF/**/*</include>
           <include>view.xml</include>
           <include>view.log4j.properties</include>
-          <include>ui/*.*</include>
-          <include>WEB-INF/web.xml</include>
         </includes>
       </resource>
 
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/css/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/css</targetPath>
-      </resource>
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/fonts/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/fonts</targetPath>
-      </resource>
-
-
-      <resource>
-        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/css</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/css</targetPath>
-      </resource>
-
-      <resource>
-        <directory>src/main/resources/ui//bower_components/moment/min/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-
-
       <resource>
-        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/js</directory>
+        <directory>src/main/resources/ui/hueambarimigration-view/dist</directory>
         <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
       </resource>
 
       <resource>
-        <directory>src/main/resources/ui/bower_components/jquery/dist/</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-      <resource>
-        <directory>src/main/resources/ui/bower_components/bootstrap/dist/js</directory>
-        <filtering>false</filtering>
-        <targetPath>${project.build.outputDirectory}/js</targetPath>
-      </resource>
-      <resource>
         <targetPath>WEB-INF/lib</targetPath>
         <filtering>false</filtering>
         <directory>target/lib</directory>
@@ -243,4 +263,4 @@
     </resources>
   </build>
 
-</project>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
deleted file mode 100644
index 4c2f4ad..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-
-import org.apache.ambari.view.ViewContext;
-
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-
-import org.apache.ambari.view.huetoambarimigration.service.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.log4j.Logger;
-
-
-public class ConfigurationCheck extends HttpServlet {
-  private static final long serialVersionUID = 1L;
-
-  ViewContext view;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-
-    final Logger logger = Logger.getLogger(ConfigurationCheck.class);
-    response.setContentType("text/html");
-    PrintWriter out = response.getWriter();
-
-    out.println("<table class=\"table\">");
-    out.println("<thead><tr><th>Service</th><th>Status</th></tr></thead>");
-    out.println("<tbody>");
-
-    if (ConfFileReader.checkConfigurationForHue(view.getProperties().get("Hue_URL"))) {
-      logger.info("Hue URl connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Ambari" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-    } else {
-      logger.info("Hue URl connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Ambari" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-    }
-
-    if (ConfFileReader.checkConfigurationForAmbari(view.getProperties().get("Ambari_URL"))) {
-
-      logger.info("Ambari URl connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Hue" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Ambari URl connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Hue" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-
-    if (ConfFileReader.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword"))) {
-
-      logger.info("Ambari Database connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Ambari Database connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-    if (ConfFileReader.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword"))) {
-
-      logger.info("Hue Database connection:- Success");
-      out.println("<tr class=\"success\">");
-      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
-      out.println("<td><h6>" + "OK" + "</h6></td>");
-      out.println("</tr>");
-
-    } else {
-
-      logger.info("Hue Database connection:- Failed");
-      out.println("<tr class=\"danger\">");
-      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
-      out.println("<td><h6>" + "ERROR" + "</h6></td>");
-      out.println("</tr>");
-
-    }
-
-    try {
-
-      if (ConfFileReader.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari"))) {
-
-        logger.info("Web hdfs Access to ambari:- Success");
-        out.println("<tr class=\"success\">");
-        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
-        out.println("<td><h6>" + "OK" + "</h6></td>");
-        out.println("</tr>");
-
-      } else {
-
-        logger.info("Web hdfs Access to ambari:- Failed");
-        out.println("<tr class=\"danger\">");
-        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
-        out.println("<td><h6>" + "ERROR" + "</h6></td>");
-        out.println("</tr>");
-
-      }
-    } catch (URISyntaxException e) {
-      logger.error("Error in accessing Webhdfs of Ambari: ", e);
-    }
-
-    try {
-      if (ConfFileReader.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue"))) {
-
-        logger.info("Web hdfs Access to hue:- Success");
-        out.println("<tr class=\"success\">");
-        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
-        out.println("<td><h6>" + "OK" + "</h6></td>");
-        out.println("</tr>");
-
-      } else {
-
-        logger.info("Web hdfs Access to hue:- Failed");
-        out.println("<tr class=\"danger\">");
-        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
-        out.println("<td><h6>" + "ERROR" + "</h6></td>");
-        out.println("</tr>");
-
-      }
-    } catch (URISyntaxException e) {
-      logger.error("Error in accessing Webhdfs of Hue: " , e);
-    }
-
-    out.println("</tbody></table>");
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
deleted file mode 100644
index 40d63df..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
-
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-
-public class ProgressBarStatus extends HttpServlet {
-
-  private static final long serialVersionUID = 1L;
-
-  public static String TASK_PROGRESS_VARIABLE = "task_progress_session";
-
-
-  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
-
-    HttpSession session = request.getSession(true);
-    Integer param = (Integer) session.getAttribute(TASK_PROGRESS_VARIABLE);
-
-    if (param == null) {
-      param = 0;
-    }
-
-    response.setContentType("text/html");
-    PrintWriter out = response.getWriter();
-    out.println(param + "%");
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
deleted file mode 100755
index 46be3fe..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
+++ /dev/null
@@ -1,222 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.hive;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.hive.HiveHistoryQueryImpl;
-
-public class HiveHistoryMigration extends HttpServlet {
-
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  ViewContext view;
-
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String username;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
-    throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(HiveHistoryMigration.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    /* fetching the variable from the client */
-    username = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
-    logger.info("--------------------------------------");
-    logger.info("Hive History query Migration started");
-    logger.info("--------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    int maxCountOfAmbariDb, i = 0;
-    String time = null;
-    Long epochTime = null;
-    String dirNameforHiveHistroy;
-
-    HiveHistoryQueryImpl hiveHistoryQueryImpl = new HiveHistoryQueryImpl();// creating objects of HiveHistroy implementation
-
-    String[] hiveQuery = new String[1000000];
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
-
-      hiveQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb);
-
-		   /* if No hive query selected from Hue Database according to our search criteria */
-
-      if (hiveQuery[i] == null) {
-
-        logger.info("No queries has been selected acccording to your criteria");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No queries selected according to your criteria</h4>");
-
-      } else {
-        /* If Hive queries are selected based on our search criteria */
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        connectionAmbaridb.setAutoCommit(false);
-
-        // for each queries fetched from Hue database//
-
-        for (i = 0; hiveQuery[i] != null; i++) {
-
-          float calc = ((float) (i + 1)) / hiveQuery.length * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-          logger.info("Hue query that has been fetched" + hiveQuery[i]);
-          int id = 0;
-
-          id = hiveHistoryQueryImpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); // feching table name according to the given instance name
-
-          logger.info("Table name has been fetched from intance name");
-
-          hiveHistoryQueryImpl.writetoFileQueryhql(hiveQuery[i], ConfFileReader.getHomeDir());// writing to .hql file to a temp file on local disk
-
-          logger.info(".hql file created in Temp directory");
-
-          hiveHistoryQueryImpl.writetoFileLogs(ConfFileReader.getHomeDir());// writing to logs file to a temp file on local disk
-
-          logger.info("Log file created in Temp directory");
-
-          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(view.getProperties().get("ambaridrivername"), connectionAmbaridb, id) + 1);// fetching the maximum count for ambari db to insert
-
-          time = hiveHistoryQueryImpl.getTime();// getting the system current time.
-
-          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
-
-          dirNameforHiveHistroy = "/user/admin/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
-
-          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
-
-          hiveHistoryQueryImpl.insertRowinAmbaridb(view.getProperties().get("ambaridrivername"), dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i);// inserting in ambari database
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("kerberose enabled");
-            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
-          } else {
-
-            logger.info("kerberose not enabled");
-            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
-            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
-          }
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.error("Sql statement are Rolledback");
-      } catch (SQLException e1) {
-        logger.error("Sql rollback exception in ambari database",
-          e1);
-      }
-    } catch (ClassNotFoundException e) {
-      logger.error("Class not found :- " ,e);
-    } catch (ParseException e) {
-      logger.error("Parse Exception : " ,e);
-    } catch (URISyntaxException e) {
-      logger.error("URI Syntax Exception: " ,e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: " ,e);
-    } finally {
-      if (connectionAmbaridb != null) try {
-        connectionAmbaridb.close();
-      } catch (SQLException e) {
-        logger.error("Exception in closing the connection :" ,e);
-      }
-    }
-    //deleteing the temprary files that are created while execution
-    hiveHistoryQueryImpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
-    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
-
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-    logger.info("------------------------------");
-    logger.info("Hive History query Migration Ends");
-    logger.info("------------------------------");
-
-    /* servlet returned to client */
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Query has been migrated to  " + instance + "</h4>");
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
deleted file mode 100644
index d873744..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
+++ /dev/null
@@ -1,231 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.hive;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.*;
-import javax.servlet.http.*;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.hive.HiveSavedQueryImpl;
-
-public class HiveSavedQueryMigration extends HttpServlet {
-
-  private static final long serialVersionUID = 1031422249396784970L;
-
-  ViewContext view;
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String userName;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(HiveSavedQueryMigration.class);
-
-    Connection connectionAmbaridb = null;
-    Connection connectionHuedb = null;
-
-    /* fetching from servlet */
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
-    int i = 0;
-
-    logger.info("-------------------------------------");
-    logger.info("Hive saved query Migration started");
-    logger.info("-------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + instance);
-    logger.info("hue username is : " + userName);
-
-    HiveSavedQueryImpl hivesavedqueryimpl = new HiveSavedQueryImpl();/* creating Implementation object  */
-
-    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
-    String time = null;
-    Long epochtime = null;
-    String dirNameforHiveSavedquery;
-    ArrayList<PojoHive> dbpojoHiveSavedQuery = new ArrayList<PojoHive>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
-
-      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(userName, startDate, endDate, connectionHuedb); /* fetching data from hue db and storing it in to a model */
-
-      if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
-
-        logger.info("no Hive saved query has been selected from hue according to your criteria of searching");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No queries selected according to your criteria</h4>");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-
-          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
-
-          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive saved query  from the given instance name */
-
-          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive history query from the given instance name */
-
-          logger.info("Table name are fetched from instance name.");
-
-          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfFileReader.getHomeDir()); /* writing hive query to a local file*/
-
-          hivesavedqueryimpl.writetoFileLogs(ConfFileReader.getHomeDir());/* writing logs to localfile */
-
-          logger.info(".hql and logs file are saved in temporary directory");
-
-          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdHistoryHive) + 1);/* fetching the maximum ds_id from hive history table*/
-
-          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdSavedQuery) + 1);/* fetching the maximum ds_id from hive saved query table*/
-
-          time = hivesavedqueryimpl.getTime();/* getting system time */
-
-          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
-
-
-          dirNameforHiveSavedquery = "/user/admin/hive/jobs/hive-job-" + maxcountForHivehistroryAmbaridb + "-"
-            + time + "/"; // creating hdfs directory name
-
-          logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
-
-
-          hivesavedqueryimpl.insertRowHiveHistory(view.getProperties().get("ambaridrivername"), dirNameforHiveSavedquery, maxcountForHivehistroryAmbaridb, epochtime, connectionAmbaridb, tableIdHistoryHive, instance, i);// inserting to hive history table
-
-          logger.info("Row inserted in Hive History table.");
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("Kerberose Enabled");
-            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
-
-          } else {
-
-            logger.info("Kerberose Not Enabled");
-            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
-            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
-          }
-
-          //inserting into hived saved query table
-          hivesavedqueryimpl.insertRowinSavedQuery(view.getProperties().get("ambaridrivername"), maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-
-      logger.error("SQL exception: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("Rollback error: ", e1);
-
-      }
-    } catch (ClassNotFoundException e1) {
-      logger.error("Class not found : " , e1);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException:" , e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("Error in connection close", e);
-        }
-    }
-
-
-    hivesavedqueryimpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
-    hivesavedqueryimpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-    logger.info("-------------------------------");
-    logger.info("Hive saved query Migration end");
-    logger.info("--------------------------------");
-
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Saved query has been migrated to  " + instance + "</h4>");
-  }
-}
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
deleted file mode 100644
index 4b6afdb..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.pig;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Date;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.*;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.pig.PigJobImpl;
-
-public class PigJobMigration extends HttpServlet {
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  ViewContext view;
-  int i = 0;
-  private String userName;
-  private String startDate;
-  private String endDate;
-  private String instance;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(PigJobMigration.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    // fetchinf data from the clients
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-
-    logger.info("------------------------------");
-    logger.info("Pig Jobs Migration started");
-    logger.info("------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + userName);
-    logger.info("hue username is : " + instance);
-
-    PigJobImpl pigjobimpl = new PigJobImpl();// creating the implementation object
-    int maxCountforPigScript = 0;
-
-    String time = null, timeIndorder = null;
-    Long epochtime = null;
-    String pigJobDirName;
-    ArrayList<PojoPig> pigJobDbPojo = new ArrayList<PojoPig>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
-      pigJobDbPojo = pigjobimpl.fetchFromHueDB(userName, startDate, endDate, connectionHuedb);// fetching the PigJobs details from hue
-
-			/*No Pig Job details has been fetched accordring to search criteria*/
-      if (pigJobDbPojo.size() == 0) {
-
-        logger.info("no Pig Job has been selected from hue according to your criteria of searching");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No Pig Job  selected according to your criteria</h4>");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < pigJobDbPojo.size(); i++) {
-
-          float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
-
-          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);
-          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(view.getProperties().get("ambaridrivername"), connectionAmbaridb, fetchPigTablenameInstance) + 1);
-
-          time = pigjobimpl.getTime();
-          timeIndorder = pigjobimpl.getTimeInorder();
-          epochtime = pigjobimpl.getEpochTime();
-
-          pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
-
-          pigjobimpl.insertRowPigJob(view.getProperties().get("ambaridrivername"), pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          } else {
-
-            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          }
-
-          logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
-
-        }
-        connectionAmbaridb.commit();
-      }
-
-    } catch (SQLException e) {
-      logger.error("sql exception in ambari database:", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("roll back  exception:",e1);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception:",e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: " ,e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException" ,e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException" ,e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection closing exception ", e);
-        }
-    }
-
-    logger.info("------------------------------");
-    logger.info("Pig Job Migration End");
-    logger.info("------------------------------");
-
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Pig jobs has been migrated to  "
-      + instance + "</h4>");
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
deleted file mode 100644
index bb803f3..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.controller.pig;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.model.*;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-import org.apache.ambari.view.huetoambarimigration.service.pig.PigScriptImpl;
-
-/**
- * Servlet that migrates Pig saved scripts from a Hue database into an
- * Ambari Pig view instance. Scripts matching the requested user and date
- * range are read from the Hue DB, staged as local .pig files, registered
- * in the Ambari DB, and finally uploaded into HDFS.
- */
-public class PigScriptMigration extends HttpServlet {
-
-
-  private static final long serialVersionUID = 1031422249396784970L;
-  // Ambari view context, resolved from the servlet context in init().
-  ViewContext view;
-  // Request parameters cached in instance fields.
-  // NOTE(review): servlet instances are shared across requests, so these
-  // mutable fields are not safe under concurrent requests — confirm the
-  // view only ever drives one migration at a time.
-  private String startDate;
-  private String endDate;
-  private String instance;
-  private String userName;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    // Obtain the ViewContext published by the Ambari view framework.
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-
-  }
-
-  /**
-   * Handles the migration request. Reads the username/startdate/enddate/
-   * instance request parameters, fetches matching Pig scripts from the Hue
-   * DB, migrates each to the target Ambari view instance, and reports
-   * progress to the UI through a session attribute.
-   */
-  public void doGet(HttpServletRequest req, HttpServletResponse resp)
-    throws ServletException, IOException {
-
-    HttpSession session = req.getSession(true);
-    final Logger logger = Logger.getLogger(PigScriptMigration.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("-------------------------------------");
-    logger.info("Pig saved script Migration started");
-    logger.info("-------------------------------------");
-
-    // Fetching the search criteria supplied by the client.
-
-    userName = req.getParameter("username");
-    startDate = req.getParameter("startdate");
-    endDate = req.getParameter("enddate");
-    instance = req.getParameter("instance");
-    int i = 0;
-
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    // NOTE(review): the two labels below appear swapped — "instance is"
-    // logs the user name and "hue username is" logs the instance.
-    logger.info("instance is: " + userName);
-    logger.info("hue username is : " + instance);
-
-    //Reading the configuration file
-    PigScriptImpl pigsavedscriptmigration = new PigScriptImpl();
-
-    int maxcountforsavequery = 0, maxcountforpigsavedscript;
-    String time = null, timetobeInorder = null;
-    Long epochTime = null;
-    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename="";
-    int pigInstanceTableName;
-
-    ArrayList<PojoPig> dbpojoPigSavedscript = new ArrayList<PojoPig>();
-
-    try {
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
-      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(userName, startDate, endDate, connectionHuedb, view.getProperties().get("huedrivername"));// Fetching Pig script details from Hue DB
-
-      /* If No Pig Script has been fetched from Hue db according to our search criteria*/
-      if (dbpojoPigSavedscript.size() == 0) {
-
-        logger.info("no Pig script has been selected from hue according to your criteria of searching");
-        resp.setContentType("text/html");
-        PrintWriter out = resp.getWriter();
-        out.println("<br>");
-        out.println("<h4>No Pig Script selected according to your criteria</h4>");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        // Manual transaction: all per-script inserts are committed together
-        // after the loop, or rolled back in the SQLException handler.
-        connectionAmbaridb.setAutoCommit(false);
-        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
-
-        //for each pig script found in Hue Database
-
-        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
-
-
-          // Publish migration progress (percentage) for the UI progress bar.
-          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
-
-          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);// finding the table name in ambari from the given instance
-
-          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, pigInstanceTableName) + 1);// maximum count of the primary key of Pig Script table
-
-          time = pigsavedscriptmigration.getTime();
-
-          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
-
-          epochTime = pigsavedscriptmigration.getEpochTime();
-
-          dirNameForPigScript = "/user/admin/pig/scripts/";
-
-          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
-
-          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
-
-          // Stage the script on local disk, register it in the Ambari DB,
-          // then upload to HDFS (secured or plain, per configuration).
-          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfFileReader.getHomeDir(), pigscriptFilename);
-
-          pigsavedscriptmigration.insertRowForPigScript(view.getProperties().get("ambaridrivername"), completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-            pigsavedscriptmigration.putFileinHdfsSecured(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          } else {
-            pigsavedscriptmigration.putFileinHdfs(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          }
-
-          logger.info(dbpojoPigSavedscript.get(i).getTitle() + "Migrated to Ambari");
-
-          // Clean up the locally staged copy once it is in HDFS.
-          pigsavedscriptmigration.deletePigScriptLocalFile(ConfFileReader.getHomeDir(), pigscriptFilename);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("rollback done");
-      } catch (SQLException e1) {
-        // NOTE(review): logs the outer exception `e`, not the rollback
-        // failure `e1` — the rollback cause is lost.
-        logger.error("Sql exception while doing roll back", e);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception", e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: " , e);
-    } finally {
-      // NOTE(review): connectionHuedb is never closed here — only the
-      // Ambari connection is released.
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection close exception: ", e);
-        }
-    }
-
-    // Reset the progress bar and report the final count to the client.
-    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-    resp.setContentType("text/html");
-    PrintWriter out = resp.getWriter();
-    out.println("<br>");
-    out.println("<h4>" + i + " Pig Script has been migrated to " + instance + "</h4>");
-
-    logger.info("----------------------------------");
-    logger.info("Pig saved script Migration ends");
-    logger.info("----------------------------------");
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
deleted file mode 100644
index 2fff19a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.controller.revertchange;
-
-import java.beans.PropertyVetoException;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-
-import javax.servlet.ServletConfig;
-import javax.servlet.ServletContext;
-import javax.servlet.ServletException;
-import javax.servlet.http.HttpServlet;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
-
-
-/**
- * Servlet that reverts a previous Hue-to-Ambari migration for a given view
- * instance: it replays the compensating SQL statements recorded in
- * RevertChange.xml (for records newer than the requested date) against the
- * Ambari DB and deletes the corresponding HDFS directories.
- */
-public class RevertChange extends HttpServlet {
-
-  private static final long serialVersionUID = 1L;
-  // Ambari view context, resolved from the servlet context in init().
-  ViewContext view;
-
-  @Override
-  public void init(ServletConfig config) throws ServletException {
-
-    super.init(config);
-    ServletContext context = config.getServletContext();
-    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
-  }
-
-  /**
-   * Returns true when the servlet-supplied date is strictly earlier than
-   * the date recorded in the revert log (i.e. the record should be reverted).
-   * NOTE(review): the pattern uses "hh" (12-hour clock) rather than "HH";
-   * confirm the recorded timestamps are consistent with this, otherwise
-   * afternoon times will compare incorrectly.
-   *
-   * @throws ParseException if either string does not match the pattern
-   */
-  public boolean stringtoDatecompare(String datefromservlet,
-                                     String datefromfile) throws ParseException {
-
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
-    Date date1 = formatter.parse(datefromservlet);
-    Date date2 = formatter.parse(datefromfile);
-    if (date1.compareTo(date2) < 0) {
-      return true;
-    } else {
-      return false;
-    }
-
-  }
-
-  /**
-   * Recursively deletes {@code dir} from HDFS, acting as the "hdfs" user
-   * against the given namenode URI.
-   * NOTE(review): failures are only printed to stderr and swallowed, so a
-   * failed delete does not abort the revert.
-   */
-  public void removedir(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class
-              .getName());
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class
-              .getName());
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          // true => recursive delete of the whole directory tree.
-          fs.delete(src, true);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  /**
-   * Handles the revert request. Reads the revertdate/instance parameters,
-   * walks every RevertRecord in RevertChange.xml, and for records matching
-   * the instance and newer than the revert date executes the stored SQL
-   * and removes the recorded HDFS directory. All SQL runs in one manual
-   * transaction, committed at the end or rolled back on SQLException.
-   */
-  protected void doGet(HttpServletRequest request,
-                       HttpServletResponse response) throws ServletException, IOException {
-
-    final Logger logger = Logger.getLogger(RevertChange.class);
-
-    logger.info("------------------------------");
-    logger.info("Reverting the changes Start:");
-    logger.info("------------------------------");
-
-    HttpSession session = request.getSession(true);
-    String revertDate = request.getParameter("revertdate");
-    String instance = request.getParameter("instance");
-
-    logger.info("Revert Date " + revertDate);
-    logger.info("instance name " + instance);
-
-    BufferedReader br = null;
-    Connection connectionAmbariDatabase = null;
-
-    try {
-      connectionAmbariDatabase = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-      connectionAmbariDatabase.setAutoCommit(false);
-
-      // NOTE(review): stmt (and the connection) are never closed on any
-      // path through this method.
-      Statement stmt = null;
-      stmt = connectionAmbariDatabase.createStatement();
-      SAXBuilder builder = new SAXBuilder();
-      File xmlFile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
-      try {
-
-        Document document = (Document) builder.build(xmlFile);
-        Element rootNode = document.getRootElement();
-        List list = rootNode.getChildren("RevertRecord");
-
-        for (int i = 0; i < list.size(); i++) {
-
-          // Publish revert progress (percentage) for the UI progress bar.
-          float calc = ((float) (i + 1)) / list.size() * 100;
-          int progressPercentage = Math.round(calc);
-          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
-
-          Element node = (Element) list.get(i);
-
-          if (node.getChildText("instance").equals(instance)) {
-
-            if (stringtoDatecompare(revertDate, node.getChildText("datetime").toString())) {
-
-              // Replay the compensating SQL recorded at migration time.
-              // NOTE(review): executed verbatim from the XML log — the log
-              // file must be trusted/otherwise protected from tampering.
-              String sql = node.getChildText("query");
-              logger.info(sql);
-              stmt.executeUpdate(sql);
-              removedir(node.getChildText("dirname").toString(), view.getProperties().get("namenode_URI_Ambari"));
-              logger.info(node.getChildText("dirname").toString()+" deleted");
-
-            }
-
-          }
-
-        }
-
-        connectionAmbariDatabase.commit();
-
-        response.setContentType("text/html");
-        PrintWriter out = response.getWriter();
-        out.println("<br>");
-        out.println("<h4>" + " The change has been revert back for "
-          + instance + "</h4>");
-
-        session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-        logger.info("------------------------------");
-        logger.info("Reverting the changes End");
-        logger.info("------------------------------");
-
-      } catch (IOException e) {
-        logger.error("IOException: ",e);
-      } catch (ParseException e) {
-        logger.error("ParseException: ",e);
-      } catch (JDOMException e) {
-        logger.error("JDOMException: ",e);
-      } catch (URISyntaxException e) {
-        logger.error("URISyntaxException:  ",e);
-      }
-    } catch (SQLException e1) {
-      logger.error("SqlException  ",e1);
-      try {
-        connectionAmbariDatabase.rollback();
-        logger.info("Rollback done");
-      } catch (SQLException e2) {
-        logger.error("SqlException in Rollback  ",e2);
-      }
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ",e);
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
index 034c4c6..0d0fc7d 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
@@ -18,8 +18,6 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
index 6e40308..ce8aecd 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
@@ -18,8 +18,6 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
-import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
-import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/424afb47/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
new file mode 100644
index 0000000..c08f009
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
+
+/**
+ * MySQL-specific SQL text for the Hive history migration query set.
+ * The {@code id} argument is the numeric suffix of the per-view-instance
+ * DS_JOBIMPL_&lt;id&gt; table (resolved from the viewentity catalog), so it
+ * is interpolated into the table name directly — table names cannot be
+ * bound as JDBC parameters.
+ */
+
+public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
+
+  /** Highest ds_id in DS_JOBIMPL_&lt;id&gt;, cast to a number so ordering is numeric. */
+  @Override
+  protected String getSqlMaxDSidFromTableId(int id) {
+    return "select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";";
+  }
+
+  /** Resolves the JobImpl table id for a view instance name (instance bound as ?). */
+  @Override
+  protected String getTableIdSqlFromInstanceName() {
+    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
+  }
+
+  /** Parameterized insert of one migrated Hive history row. */
+  @Override
+  protected String getSqlInsertHiveHistory(int id) {
+    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
+  }
+
+  /**
+   * Revert statement that deletes the inserted row again.
+   * NOTE(review): maxcount is concatenated into the SQL rather than bound
+   * as a ? parameter — safe only because it is produced internally; confirm
+   * it can never carry user input.
+   */
+  @Override
+  protected String getRevSql(int id, String maxcount) {
+    return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
+  }
+
+}


Mime
View raw message